kazuma313 committed
Commit 2a49825
Parent: 00aba42

Update app.py

Files changed (1): app.py (+11, -11)
app.py CHANGED
@@ -32,17 +32,17 @@ def output_inference(tanya, history):
     prompt = prompt_template.format(ask=tanya)
 
     output = llm(
-        repeat_penalty=1.2,
-        prompt,
-        stop=["<|end_of_text|>"],
-        max_tokens=512,
-        temperature=0.3,
-        top_p=0.95,
-        top_k=40,
-        min_p=0.05,
-        typical_p=1.0,
-        stream=True,
-    )
+        prompt,
+        repeat_penalty=1.2,
+        stop=["<|end_of_text|>"],
+        max_tokens=512,
+        temperature=0.3,
+        top_p=0.95,
+        top_k=40,
+        min_p=0.05,
+        typical_p=1.0,
+        stream=True,
+    )
     for out in output:
         stream = copy.deepcopy(out)
         temp += stream["choices"][0]["text"]
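
For context, the call in this diff uses llama-cpp-python's streaming completion API: `Llama.__call__` takes the prompt as its first positional argument, so it has to come before keyword arguments such as `repeat_penalty`. Below is a minimal sketch of how such a streamed completion is typically consumed; the model path and prompt text are placeholders, not values taken from this repository.

# Minimal sketch of the streaming pattern in app.py (assumed setup, not the repo's exact code).
import copy

from llama_cpp import Llama

llm = Llama(model_path="model.gguf")  # placeholder model path

prompt = "Question: What is llama.cpp?\nAnswer:"  # placeholder prompt

output = llm(
    prompt,                      # positional argument: must precede the keyword arguments
    stop=["<|end_of_text|>"],
    max_tokens=512,
    temperature=0.3,
    top_p=0.95,
    top_k=40,
    min_p=0.05,
    typical_p=1.0,
    repeat_penalty=1.2,
    stream=True,                 # returns an iterator of partial completion chunks
)

temp = ""
for out in output:
    chunk = copy.deepcopy(out)             # each chunk is a completion-style dict
    temp += chunk["choices"][0]["text"]    # accumulate the streamed text
    print(chunk["choices"][0]["text"], end="", flush=True)

With stream=True the call yields chunks incrementally instead of returning one response, which is why the app accumulates `choices[0]["text"]` in a loop.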