Update app.py
app.py CHANGED
@@ -4,7 +4,7 @@ from huggingface_hub import InferenceClient
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
-client = InferenceClient("
+client = InferenceClient("HuggingfaceH4/zephyr-7b-beta")


def respond(
@@ -25,19 +25,15 @@ def respond(

    messages.append({"role": "user", "content": message})

-
-
-    for message in client.chat_completion(
+    # Disable streaming and get a single response
+    response = client.chat_completion(
        messages,
        max_tokens=max_tokens,
-        stream=True,
        temperature=temperature,
        top_p=top_p,
-    ):
-        token = message.choices[0].delta.content
+    )

-
-        yield response
+    return response.choices[0].message.content

"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
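
The substance of this hunk is the switch from a streamed `client.chat_completion(..., stream=True)` call to a single blocking call. As a minimal standalone sketch (the model id is the one this commit sets; the prompt and token limit are illustrative only), the two return shapes differ like this: the streamed call yields chunks whose text sits in `choices[0].delta.content`, while the non-streaming call returns one object with the full text in `choices[0].message.content`.

from huggingface_hub import InferenceClient

# Model id taken from this commit; the prompt is illustrative only.
client = InferenceClient("HuggingfaceH4/zephyr-7b-beta")
messages = [{"role": "user", "content": "Hello!"}]

# Old pattern (removed): stream chunks and print each incremental delta.
for chunk in client.chat_completion(messages, max_tokens=64, stream=True):
    print(chunk.choices[0].delta.content or "", end="")

# New pattern (added): one blocking call, full text on the message field.
response = client.chat_completion(messages, max_tokens=64)
print(response.choices[0].message.content)

Because `respond` now returns a single string instead of yielding partial ones, `gr.ChatInterface` shows the reply in one piece; it accepts either a plain function or a generator.
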
@@ -45,17 +41,24 @@ For information on how to customize the ChatInterface, peruse the gradio docs: h
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
-        gr.Textbox(value="
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="
+        gr.Textbox(value="ユーザーの質問や依頼にのみ答えてください。ポジティブに答えてください。", label="System message"),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="新規トークン最大"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="温度"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
-            label="Top-p (
+            label="Top-p (核 sampling)",
        ),
    ],
+    examples=[
+        ["日本で有名なものと言えば"],
+        ["レポートを書いてくれる?"],
+        ["C#で素数を判定するコードを書いて"],
+        ["250の約数は?"],
+    ],
+    concurrency_limit=30 # handle up to 30 requests concurrently
)


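Assembled from the three hunks, the patched app.py would read roughly as below. This is a sketch rather than the verbatim file: the imports, the `respond` signature, the history-building loop, and the closing `demo.launch()` block are not shown in this diff and are assumed to follow the standard Gradio ChatInterface template that the visible lines match; the Japanese UI strings are copied verbatim from the added lines.

import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingfaceH4/zephyr-7b-beta")


def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Assumed from the standard template: wrap the system prompt and prior
    # turns into a message list, then append the newest user message.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Streaming disabled by this commit: one call, one complete answer.
    response = client.chat_completion(
        messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
    )
    return response.choices[0].message.content


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="ユーザーの質問や依頼にのみ答えてください。ポジティブに答えてください。", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="新規トークン最大"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="温度"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (核 sampling)"),
    ],
    examples=[
        ["日本で有名なものと言えば"],
        ["レポートを書いてくれる?"],
        ["C#で素数を判定するコードを書いて"],
        ["250の約数は?"],
    ],
    concurrency_limit=30,
)

if __name__ == "__main__":
    demo.launch()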