|
import os

import gradio as gr

from transformers import Conversation, pipeline
|
|
|
|
|
# Conversational pipeline backed by Llama-2-70b-chat.
# SECURITY: a Hugging Face access token was previously hard-coded here — a
# leaked credential. It is now read from the HF_TOKEN environment variable;
# the old token must be revoked on the Hub. The model id also needs the
# "meta-llama/" org prefix to resolve correctly.
chatbot_pipeline = pipeline(
    "conversational",
    model="meta-llama/Llama-2-70b-chat-hf",
    use_auth_token=os.environ.get("HF_TOKEN"),
)
|
|
|
def chatbot_response(input_text, history=None):
    """Generate a chatbot reply and update the running conversation state.

    Parameters
    ----------
    input_text : str
        The user's latest message.
    history : list[str] | None
        Flat alternating list ``[user, bot, user, bot, ...]``. Defaults to a
        fresh list. (The original ``history=[]`` mutable default was shared
        across every call, so all sessions bled into one another.)

    Returns
    -------
    tuple
        ``(chat_log, history)`` where ``chat_log`` is a list of
        ``(user, bot)`` pairs for display and ``history`` is the updated
        flat list.
    """
    if history is None:
        history = []

    history.append(input_text)

    # Wrap only the newest message; the pipeline generates the reply.
    # NOTE(review): only input_text is passed, so earlier turns are not fed
    # back to the model — the model sees each message without prior context.
    conversation = Conversation(input_text)
    responses = chatbot_pipeline([conversation])

    # Take the most recent generated reply from the last returned item.
    model_response = responses[-1].generated_responses[-1]
    history.append(model_response)

    # Pair alternating user/bot entries for display.
    chat_log = list(zip(history[::2], history[1::2]))

    return chat_log, history
|
|
|
|
|
# Build the web UI. chatbot_response returns (chat_log, history), so the
# first output component must be the chat log — the original labels were
# swapped. The deprecated gr.inputs / gr.outputs namespaces (removed in
# Gradio 4.x) are replaced with the component classes, which also exist in
# Gradio 3.x.
iface = gr.Interface(
    fn=chatbot_response,
    inputs=gr.Textbox(lines=2, placeholder='Type a message...'),
    outputs=[
        gr.Textbox(label="Chat Log"),
        gr.Textbox(label="Chat History"),
    ],
)

# Launch only when run as a script, so importing this module for reuse or
# testing does not start a web server.
if __name__ == "__main__":
    iface.launch()