qgyd2021 committed on
Commit
297c6ff
1 Parent(s): 7cb717b
Files changed (1)
  1. main.py +1 -1
main.py CHANGED
@@ -159,7 +159,7 @@ def chat_with_llm_streaming(question: str,
 
     for encoded_utterance in encoded_utterances:
         input_ids.extend(encoded_utterance)
-        if model.config.model_type == "chatglm":
+        if model.config.model_type != "chatglm":
            input_ids.append(tokenizer.eos_token_id)
 
     input_ids = torch.tensor([input_ids], dtype=torch.long)
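
For context, after this change the loop would read roughly as below. This is a minimal sketch, assuming model, tokenizer, and encoded_utterances are defined earlier in chat_with_llm_streaming as in main.py; the helper name build_input_ids is hypothetical and used only to make the snippet self-contained.

import torch

def build_input_ids(encoded_utterances, tokenizer, model):
    # Hypothetical helper mirroring the patched loop: concatenate every
    # encoded utterance into a single flat list of token ids.
    input_ids = []
    for encoded_utterance in encoded_utterances:
        input_ids.extend(encoded_utterance)
        # After the fix, the EOS separator is appended only when the model
        # type is NOT "chatglm".
        if model.config.model_type != "chatglm":
            input_ids.append(tokenizer.eos_token_id)
    # Wrap in a batch dimension of 1, matching the original code.
    return torch.tensor([input_ids], dtype=torch.long)

In short, the comparison was flipped from == to !=, so the eos_token_id separator is now skipped for chatglm models instead of being added only for them.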