Spaces:
Running
Running
File size: 3,941 Bytes
2ac6564 b6e7cab 2ac6564 87c0e5b 70c4d17 87c0e5b 2ac6564 db00838 2ac6564 7a1fac7 2ac6564 abb9121 2ac6564 7a1fac7 2ac6564 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 |
import os
os.environ["GRADIO_ANALYTICS_ENABLED"] = "False"
os.system('pip install dashscope')
import gradio as gr
from http import HTTPStatus
import dashscope
from dashscope import Generation
from dashscope.api_entities.dashscope_response import Role
from typing import List, Optional, Tuple, Dict
from urllib.error import HTTPError
# System prompt used whenever the user does not supply one.
default_system = 'You are a helpful assistant.'
# DashScope API key is read from the environment (configured as a Space secret);
# None here means unauthenticated calls will fail at request time.
YOUR_API_TOKEN = os.getenv('YOUR_API_TOKEN')
dashscope.api_key = YOUR_API_TOKEN
# Gradio chatbot history: a list of (user_message, assistant_message) pairs.
History = List[Tuple[str, str]]
# DashScope wire format: a list of {'role': ..., 'content': ...} dicts.
Messages = List[Dict[str, str]]
def clear_session() -> 'Tuple[str, History]':
    """Reset the chat UI: empty the input textbox and clear the history.

    Returns:
        A pair of ('' for the textbox, [] for the chatbot history).
        Note: the original annotation claimed ``History`` but the function
        has always returned a 2-tuple, matching its two Gradio outputs.
    """
    return '', []
def modify_system_session(system: str) -> 'Tuple[str, str, History]':
    """Apply a new system prompt and clear the conversation.

    Args:
        system: The system prompt entered by the user; falls back to
            ``default_system`` when empty or None.

    Returns:
        (system_state, system_input, history) — the prompt is echoed to both
        the hidden state and the visible textbox, and the history is cleared.
        Note: the original ``-> str`` annotation was wrong; three values are
        returned, matching the three Gradio outputs wired to this callback.
    """
    if not system:  # covers both None and ''
        system = default_system
    return system, system, []
def history_to_messages(history: History, system: str) -> Messages:
    """Flatten Gradio (user, assistant) pairs into DashScope message dicts.

    The system prompt becomes the first message; each history pair then
    contributes one USER and one ASSISTANT message, in conversation order.
    """
    messages: Messages = [{'role': Role.SYSTEM, 'content': system}]
    for turn in history:
        messages.append({'role': Role.USER, 'content': turn[0]})
        messages.append({'role': Role.ASSISTANT, 'content': turn[1]})
    return messages
def messages_to_history(messages: Messages) -> Tuple[str, History]:
    """Invert ``history_to_messages``: split message dicts back into
    (system_prompt, [[user, assistant], ...]).

    Expects the first message to be the system prompt, followed by strictly
    alternating user/assistant messages.
    """
    assert messages[0]['role'] == Role.SYSTEM
    system_prompt = messages[0]['content']
    turns = messages[1:]
    # Pair even-indexed (user) with odd-indexed (assistant) entries.
    history = [[user['content'], bot['content']]
               for user, bot in zip(turns[::2], turns[1::2])]
    return system_prompt, history
def model_chat(query: 'Optional[str]', history: 'Optional[History]',
               system: str) -> 'Iterator[Tuple[str, History, str]]':
    """Stream one chat turn from DashScope's Qwen1.5-32B endpoint.

    Args:
        query: The user's new message; None is treated as ''.
        history: Prior (user, assistant) pairs; None is treated as [].
        system: The active system prompt.

    Yields:
        ('', updated_history, system) once per streamed chunk — the empty
        string clears the input textbox, and history grows the partial reply.
        (The original ``-> Tuple`` annotation was wrong: this is a generator.)

    Raises:
        HTTPError: on any non-OK chunk from the streaming API.
    """
    if query is None:
        query = ''
    if history is None:
        history = []
    messages = history_to_messages(history, system)
    messages.append({'role': Role.USER, 'content': query})
    gen = Generation.call(
        model='qwen1.5-32b-chat',
        messages=messages,
        result_format='message',
        stream=True,
    )
    for response in gen:
        if response.status_code == HTTPStatus.OK:
            role = response.output.choices[0].message.role
            # Keep the chunk content in its own name: the original rebound
            # ``response`` here, shadowing the loop variable mid-iteration.
            content = response.output.choices[0].message.content
            system, history = messages_to_history(
                messages + [{'role': role, 'content': content}])
            yield '', history, system
        else:
            # urllib's HTTPError requires (url, code, msg, hdrs, fp); the
            # original one-argument call would itself die with a TypeError.
            raise HTTPError(
                'dashscope', response.status_code,
                'Request id: %s, Status code: %s, error code: %s, error message: %s' % (
                    response.request_id, response.status_code,
                    response.code, response.message),
                None, None)
# HTML shown in the empty chatbot before the first message: Qwen1.5 logo + tagline.
placeholder = """
<img src="https://camo.githubusercontent.com/c9ae5235df8d579b955cb8829a97aaf41cfcfe20b4a955276f7eb066fa2f88d4/68747470733a2f2f7169616e77656e2d7265732e6f73732d616363656c65726174652e616c6979756e63732e636f6d2f6173736574732f626c6f672f7177656e312e352f6c6f676f5f7177656e312e352e6a7067" style="width:40%">
<b>Qwen1.5-32B</b>: Fitting the Capstone of the Qwen1.5 Language Model Series
"""
# Assemble the Gradio UI: system-prompt row, chatbot, input box, action buttons.
with gr.Blocks() as demo:
    gr.Markdown("""<center><font size=8>Qwen1.5-32B-Chat</center>""")

    with gr.Row():
        with gr.Column(scale=3):
            system_input = gr.Textbox(value=default_system, lines=1, label='System')
        with gr.Column(scale=1):
            modify_system = gr.Button("🛠️ Set system prompt and clear the history", scale=2)
        # Hidden copy of the active system prompt, fed into model_chat.
        system_state = gr.Textbox(value=default_system, visible=False)

    chatbot = gr.Chatbot(label='Qwen1.5-32B-Chat', placeholder=placeholder)
    textbox = gr.Textbox(lines=2, label='Input')

    with gr.Row():
        clear_history = gr.Button("🧹 Clear history")
        send_button = gr.Button("🚀 Send")  # was misspelled "sumbit"

    # Send: stream the model reply into the chatbot, echo system back.
    send_button.click(
        model_chat,
        inputs=[textbox, chatbot, system_state],
        outputs=[textbox, chatbot, system_input],
    )
    # Clear: wipe the textbox and the chat history.
    clear_history.click(
        fn=clear_session,
        inputs=[],
        outputs=[textbox, chatbot],
    )
    # Set system prompt: update state + visible box, reset the chat.
    modify_system.click(
        fn=modify_system_session,
        inputs=[system_input],
        outputs=[system_state, system_input, chatbot],
    )

demo.queue(api_open=False)
demo.launch(max_threads=30)
|