ruixie committed
Commit ae7085f
1 Parent(s): 545ee22

Update modeling_codeshell.py

Files changed (1)
  1. modeling_codeshell.py +2 -2
modeling_codeshell.py CHANGED
@@ -970,7 +970,7 @@ class CodeShellForCausalLM(CodeShellPreTrainedModel):
         generation_config = generation_config or self.generation_config
         input_ids = self.build_chat_input(query, history, tokenizer, generation_config.max_new_tokens)
         stopping_criteria = StoppingCriteriaList(
-            [EndOfFunctionCriteria([len(input_ids[0])], ['|<end>|', '|end|', '<|endoftext|>'], tokenizer)]
+            [EndOfFunctionCriteria([len(input_ids[0])], ['|<end>|', '|end|', '<|endoftext|>', '## human'], tokenizer)]
         )

         if stream:
@@ -994,7 +994,7 @@ class CodeShellForCausalLM(CodeShellPreTrainedModel):
         input_ids = input_ids[-max_input_tokens:]  # truncate left

         stopping_criteria = StoppingCriteriaList(
-            [EndOfFunctionCriteria([len(input_ids[0])], ['|end|', '|<end>|', '<|endoftext|>'], tokenizer)]
+            [EndOfFunctionCriteria([len(input_ids[0])], ['|<end>|', '|end|', '<|endoftext|>', '## human'], tokenizer)]
         )

         streamer = TextIterStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
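
For context: the definition of EndOfFunctionCriteria is not part of this diff. Below is a minimal sketch of how such a string-based stopping criterion is commonly built on transformers.StoppingCriteria; the class body is an assumption for illustration, not the actual implementation in modeling_codeshell.py.

import torch
from transformers import StoppingCriteria

class EndOfFunctionCriteria(StoppingCriteria):
    # Sketch only: the real class in modeling_codeshell.py may differ in detail.
    # Stops generation once every sequence in the batch contains one of the stop strings.
    def __init__(self, input_lengths, stop_strings, tokenizer):
        self.input_lengths = input_lengths  # prompt lengths, e.g. [len(input_ids[0])]
        self.stop_strings = stop_strings    # e.g. ['|<end>|', '|end|', '<|endoftext|>', '## human']
        self.tokenizer = tokenizer

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
        # Decode only the newly generated tokens of each sequence and look for any stop string.
        done = []
        for seq, prompt_len in zip(input_ids, self.input_lengths):
            generated = self.tokenizer.decode(seq[prompt_len:])
            done.append(any(s in generated for s in self.stop_strings))
        return all(done)

Under that reading, adding '## human' to both stop lists makes generation halt as soon as the model begins emitting the next '## human' turn marker, rather than only when it produces one of the explicit end-of-text tokens.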