Update app.py
app.py
CHANGED
@@ -164,6 +164,16 @@ with gr.Blocks(css=MODEL_SELECTION_CSS, theme='gradio/soft') as demo:
     })
     local_data = gr.JSON({}, visible=False)
 
+    gr.Markdown("## LLaMA2 70B with Gradio Chat and Hugging Face Inference API", elem_classes=["center"])
+    gr.Markdown(
+        "This space demonstrates how to build feature rich chatbot UI in [Gradio](https://www.gradio.app/). Supported features "
+        "include • multiple chatting channels, • chat history save/restoration, • stop generating text response, • regenerate the "
+        "last conversation, • clean the chat history, • dynamic kick-starting prompt templates, • adjusting text generation parameters, "
+        "• inspecting the actual prompt that the model sees. The underlying Large Language Model is the [Meta AI](https://ai.meta.com/)'s "
+        "[LLaMA2-70B](https://huggingface.co/meta-llama/Llama-2-70b-chat-hf) which is hosted as [Hugging Face Inference API](https://huggingface.co/inference-api), "
+        "and [Text Generation Inference](https://github.com/huggingface/text-generation-inference) is the underlying serving framework.",
+        elem_classes=["center"])
+
     with gr.Row():
         with gr.Column(scale=1, min_width=180):
             gr.Markdown("GradioChat", elem_id="left-top")
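For context, here is a minimal standalone sketch of how the added header block sits inside the app's gr.Blocks layout. The DEMO_CSS constant and the placeholder description text are stand-ins for illustration only; the real app defines MODEL_SELECTION_CSS and the full feature text elsewhere in app.py.

# sketch.py -- minimal reproduction of the layout touched by this diff
import gradio as gr

# Stand-in for MODEL_SELECTION_CSS; assumes a ".center" rule exists in the real app
DEMO_CSS = ".center { text-align: center; }"

with gr.Blocks(css=DEMO_CSS, theme='gradio/soft') as demo:
    # Centered page header and feature description added by this commit
    gr.Markdown("## LLaMA2 70B with Gradio Chat and Hugging Face Inference API",
                elem_classes=["center"])
    gr.Markdown("Placeholder for the feature description text.",
                elem_classes=["center"])

    # Pre-existing layout: left column with the app title
    with gr.Row():
        with gr.Column(scale=1, min_width=180):
            gr.Markdown("GradioChat", elem_id="left-top")

if __name__ == "__main__":
    demo.launch()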