ClueAI committed
Commit 8326622
1 Parent(s): 6dbcbb7

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -28,7 +28,7 @@ def answer(text, sample=True, top_p=0.9, temperature=0.7):
     if not sample:
         out = model.generate(**encoding, return_dict_in_generate=True, output_scores=False, max_new_tokens=1024, num_beams=1, length_penalty=0.6)
     else:
-        out = model.generate(**encoding, return_dict_in_generate=True, output_scores=False, max_new_tokens=1024, do_sample=True, top_p=top_p, temperature=temperature, no_repeat_ngram_size=3)
+        out = model.generate(**encoding, return_dict_in_generate=True, output_scores=False, max_new_tokens=1024, do_sample=True, top_p=top_p, temperature=temperature, no_repeat_ngram_size=12)
     #out=model.generate(**encoding, **generate_config)
     out_text = tokenizer.batch_decode(out["sequences"], skip_special_tokens=True)
     return postprocess(out_text[0])
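
For context, transformers' no_repeat_ngram_size blocks the decoder from emitting any n-gram of that size that already appears in the generated sequence, so raising it from 3 to 12 only forbids exact repeats of 12 or more tokens and lets short phrases recur during sampling. Below is a minimal standalone sketch of the sampling branch after this change; the checkpoint name "ClueAI/ChatYuan-large-v1" and the toy prompt are assumptions for illustration, not taken from this commit.

# Minimal sketch of the sampling branch, assuming a T5-style seq2seq
# checkpoint; the model id and prompt below are hypothetical examples.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_name = "ClueAI/ChatYuan-large-v1"  # assumption, not confirmed by this commit
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

encoding = tokenizer("你好", return_tensors="pt")  # toy prompt for illustration

# do_sample=True draws tokens with nucleus sampling (top_p) at the given
# temperature; no_repeat_ngram_size=12 only blocks exact repeats of 12+ tokens,
# a much looser constraint than the previous value of 3.
out = model.generate(
    **encoding,
    return_dict_in_generate=True,
    output_scores=False,
    max_new_tokens=1024,
    do_sample=True,
    top_p=0.9,
    temperature=0.7,
    no_repeat_ngram_size=12,
)
print(tokenizer.batch_decode(out["sequences"], skip_special_tokens=True)[0])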