import gradio as gr
import os
import time
import GPTSimple as ai  # chat-completion client; used here to call the DeepInfra endpoint
import random
import vision  # local module that captions uploaded images via vision.see_file()
# Chatbot demo with multimodal input (text plus image uploads) and streamed responses.
DESC = "# LLaMA 3.1 Vision\n<p>LLaMA 3.1 Vision uses LLaMA 3.1 405B and Florence 2 to give LLaMA vision</p>"
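# Uploaded images are captioned by the local `vision` module (Florence 2); the caption
# is injected into the prompt as [IMG]...[/IMG] text so the text-only LLaMA 3.1 405B
# model can "see" what the user sent.
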
def print_like_dislike(x: gr.LikeData):
    # Log like/dislike feedback (not currently wired to the chatbot's .like event).
    print(x.index, x.value, x.liked)

def add_message(history, message):
    # Append each uploaded file and the typed text as separate user turns,
    # then disable the textbox while the bot responds.
    for x in message["files"]:
        history.append(((x,), None))
    if message["text"] is not None:
        history.append((message["text"], None))
    return history, gr.MultimodalTextbox(value=None, interactive=False)

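# Builds an OpenAI-style message list from the chat history, sends it to
# LLaMA 3.1 405B on DeepInfra via GPTSimple, and streams the reply back into
# the last chat bubble.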
def bot(history):
    messages = [{"role": "system", "content": "You are a helpful assistant. You can \"see\" the images the user sends: their descriptions appear between [IMG] and [/IMG] tags. Don't mention that you only see a description."}]
    pending_images = ""
    for user_msg, bot_msg in history:
        if isinstance(user_msg, tuple):
            # File upload: caption it and prepend the description to the next text turn.
            pending_images += "[IMG]" + vision.see_file(user_msg[0]) + "[/IMG]\n"
        else:
            messages.append({"role": "user", "content": pending_images + user_msg})
            pending_images = ""
        if bot_msg is not None:
            messages.append({"role": "assistant", "content": bot_msg})
    chat = ai.conversation(base_url="deepinfra", model="meta-llama/Meta-Llama-3.1-405B-Instruct", history=messages)
    print(messages)  # debug: log the prompt sent to the model
    stream = chat.generate(stream=True)
    history[-1][1] = ""
    for chunk in stream:
        if chunk.token is not None:
            history[-1][1] += chunk.token
            yield history

def clear_history():
    return [], {"text": "", "files": []}

def retry_last(history):
    # Regenerate the last bot reply; do nothing if there is no history yet.
    if not history:
        return
    history[-1][1] = None
    yield from bot(history)

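# UI: the chatbot, a multimodal textbox, and two icon buttons (clear and retry).
# Submitting the textbox appends the message, streams the bot's reply, then
# re-enables the input.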
with gr.Blocks(fill_height=True, theme=gr.themes.Soft(), css="style.css") as demo:
    gr.Markdown(DESC)
    chatbot = gr.Chatbot(
        elem_id="chatbot",
        bubble_full_width=False,
        scale=1,
        show_label=False,
    )
    with gr.Row():
        clear_btn = gr.Button("", icon="delete.svg")
        chat_input = gr.MultimodalTextbox(
            interactive=True,
            file_count="multiple",
            placeholder="Enter message or upload file...",
            show_label=False,
        )
        retry_btn = gr.Button("", icon="retry.svg")

    chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(bot, chatbot, chatbot, api_name="bot_response")
    bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])
    clear_btn.click(clear_history, None, [chatbot, chat_input])
    retry_btn.click(retry_last, [chatbot], chatbot)

demo.queue()
demo.launch()