kazuma313 committed on
Commit
9ccbda0
1 Parent(s): 48acf2c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -15
app.py CHANGED
@@ -27,25 +27,26 @@ prompt_template="""<|begin_of_text|>Dibawah ini adalah percakapan antara dokter
27
  """
28
 
29
  def output_inference(tanya, history):
 
30
  prompt = prompt_template.format(ask=tanya)
31
 
32
  output = llm(
33
- prompt,
34
- stop=["<|end_of_text|>"],
35
- max_tokens=512,
36
- temperature=0.3,
37
- top_p=0.95,
38
- top_k=40,
39
- min_p=0.05,
40
- typical_p=1.0,
41
- stream=False,
 
 
 
 
 
42
 
43
- )
44
- inference = output['choices'][0]['text']
45
-
46
- for i in range(len(inference)):
47
- yield inference[: i+1]
48
- # return output['choices'][0]['text']
49
 
50
 
51
  gr.ChatInterface(
 
27
  """
28
 
29
  def output_inference(tanya, history):
30
+ temp = ""
31
  prompt = prompt_template.format(ask=tanya)
32
 
33
  output = llm(
34
+ prompt,
35
+ stop=["<|end_of_text|>"],
36
+ max_tokens=512,
37
+ temperature=0.3,
38
+ top_p=0.95,
39
+ top_k=40,
40
+ min_p=0.05,
41
+ typical_p=1.0,
42
+ stream=True,
43
+ )
44
+ for out in output:
45
+ stream = copy.deepcopy(out)
46
+ temp += stream["choices"][0]["text"]
47
+ yield temp
48
 
49
+ history = ["init", prompt]
 
 
 
 
 
50
 
51
 
52
  gr.ChatInterface(