Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -27,25 +27,26 @@ prompt_template="""<|begin_of_text|>Dibawah ini adalah percakapan antara dokter
|
|
27 |
"""
|
28 |
|
29 |
def output_inference(tanya, history):
|
|
|
30 |
prompt = prompt_template.format(ask=tanya)
|
31 |
|
32 |
output = llm(
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
|
|
|
|
|
|
|
|
|
|
42 |
|
43 |
-
|
44 |
-
inference = output['choices'][0]['text']
|
45 |
-
|
46 |
-
for i in range(len(inference)):
|
47 |
-
yield inference[: i+1]
|
48 |
-
# return output['choices'][0]['text']
|
49 |
|
50 |
|
51 |
gr.ChatInterface(
|
|
|
27 |
"""
|
28 |
|
29 |
def output_inference(tanya, history):
    """Stream a model answer for the user's question, token by token.

    Parameters
    ----------
    tanya : str
        The user's question; substituted into the module-level
        ``prompt_template`` via its ``ask`` placeholder.
    history : list
        Chat history supplied by ``gr.ChatInterface``; it is not used to
        build the prompt, so each turn is answered independently.

    Yields
    ------
    str
        The response text accumulated so far, so the chat UI can render
        a progressive "typing" effect.
    """
    prompt = prompt_template.format(ask=tanya)
    # NOTE(review): `llm` is the module-level llama-cpp-python model;
    # with stream=True the call returns an iterator of partial chunks
    # instead of a single completed response.
    output = llm(
        prompt,
        stop=["<|end_of_text|>"],
        max_tokens=512,
        temperature=0.3,
        top_p=0.95,
        top_k=40,
        min_p=0.05,
        typical_p=1.0,
        stream=True,
    )
    accumulated = ""
    for chunk in output:
        # Read the new text fragment directly. The original did
        # `copy.deepcopy(chunk)` first, which copied the whole chunk
        # dict on every token for no benefit — the field is only read.
        accumulated += chunk["choices"][0]["text"]
        yield accumulated
    # Dropped the original trailing `history = ["init", prompt]`: it only
    # rebound a local name after all yields and had no effect on the
    # caller's chat history (dead code).
|
|
|
|
|
|
|
|
|
|
|
50 |
|
51 |
|
52 |
gr.ChatInterface(
|