Update app.py
app.py CHANGED
@@ -16,7 +16,15 @@ generator = get_pretrained_models("7B", "tokenizer", local_rank, world_size)
 
 history = []
 
-def chat(user_input, top_p, temperature, max_gen_len, state_chatbot):
+def chat(
+    user_input,
+    include_input,
+    truncate,
+    top_p,
+    temperature,
+    max_gen_len,
+    state_chatbot
+):
     bot_response = get_output(
         generator=generator,
         prompt=user_input,
@@ -26,7 +34,7 @@ def chat(user_input, top_p, temperature, max_gen_len, state_chatbot):
 
     # remove the first phrase identical to user prompt
     bot_response = bot_response[0][len(user_input):]
-    bot_response = bot_response.replace("\n", "<br
+    bot_response = bot_response.replace("\n", "<br>")
     # trip the last phrase
     try:
         bot_response = bot_response[:bot_response.rfind(".")]
@@ -66,11 +74,18 @@ with gr.Blocks(css = """#col_container {width: 95%; margin-left: auto; margin-ri
     textbox = gr.Textbox(placeholder="Enter a prompt")
 
     with gr.Accordion("Parameters", open=False):
+        include_input = gr.Checkbox(value=True, label="Do you want to include the input in the generated text?")
+        truncate = gr.Checkbox(value=True, label="Truncate the unfinished last words?")
+
         max_gen_len = gr.Slider(minimum=20, maximum=512, value=256, step=1, interactive=True, label="Max Genenration Length",)
         top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.05, interactive=True, label="Top-p (nucleus sampling)",)
         temperature = gr.Slider(minimum=-0, maximum=5.0, value=1.0, step=0.1, interactive=True, label="Temperature",)
 
-    textbox.submit(chat, [textbox, top_p, temperature, max_gen_len, state_chatbot], [state_chatbot, chatbot])
+    textbox.submit(
+        chat,
+        [textbox, include_input, truncate, top_p, temperature, max_gen_len, state_chatbot],
+        [state_chatbot, chatbot]
+    )
     textbox.submit(reset_textbox, [], [textbox])
 
 demo.queue(api_open=False).launch()
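The visible hunks cover the new chat() signature and the UI wiring, but not the parts of the function body that actually read include_input and truncate. Below is a minimal, hypothetical sketch of how those flags could be consumed, reusing the module-level generator and get_output helper from app.py; the get_output keyword arguments beyond generator and prompt, and the exact return shape, are assumptions rather than code from this commit.

# Hypothetical sketch, not part of the commit: one way chat() could use the new flags.
def chat(user_input, include_input, truncate, top_p, temperature, max_gen_len, state_chatbot):
    bot_response = get_output(
        generator=generator,      # module-level generator from app.py
        prompt=user_input,
        max_gen_len=max_gen_len,  # these keyword names are assumptions
        temperature=temperature,
        top_p=top_p,
    )

    # get_output is assumed to return a list of generations; keep the first one
    bot_response = bot_response[0]

    if not include_input:
        # drop the echoed prompt from the start of the generation
        bot_response = bot_response[len(user_input):]

    bot_response = bot_response.replace("\n", "<br>")

    if truncate:
        # cut off the unfinished trailing sentence at the last period, if any
        last_period = bot_response.rfind(".")
        if last_period != -1:
            bot_response = bot_response[:last_period + 1]

    state_chatbot = state_chatbot + [(user_input, bot_response)]
    return state_chatbot, state_chatbot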