Small change to chat prompt
modeling_internlm.py  +1 -3
modeling_internlm.py
CHANGED
@@ -769,9 +769,7 @@ class InternLMForCausalLM(InternLMPreTrainedModel):
     def build_inputs(self, tokenizer, query: str, history: List[Tuple[str, str]] = []):
         prompt = ""
         for record in history:
-            prompt += f"""<s><|User|>:{record[0]}<eoh>\n<|Bot|>:{record[1]}<eoa>\n"""
-        if len(prompt) == 0:
-            prompt += "<s>"
+            prompt += f"""<|User|>:{record[0]}<eoh>\n<|Bot|>:{record[1]}<eoa>\n"""
         prompt += f"""<|User|>:{query}<eoh>\n<|Bot|>:"""
         return tokenizer([prompt], return_tensors="pt")

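The removed lines handled the <s> (BOS) token inside the prompt string: the old history template opened each turn with <s>, and the if len(prompt) == 0 branch prepended it when there was no history. After this change BOS no longer appears in the prompt at all, presumably because the tokenizer inserts it during encoding; that motivation is a reading of the diff, not something the commit states. As a minimal sketch of the layout the revised build_inputs now produces, the standalone helper below (format_prompt is a hypothetical name, not part of modeling_internlm.py) mirrors the template so it can be inspected without loading the model:

from typing import List, Tuple

def format_prompt(query: str, history: List[Tuple[str, str]] = []) -> str:
    # Mirrors the revised template: each past turn closes the user text with
    # <eoh> and the bot reply with <eoa>; no <s> is written by hand.
    prompt = ""
    for user_msg, bot_msg in history:
        prompt += f"""<|User|>:{user_msg}<eoh>\n<|Bot|>:{bot_msg}<eoa>\n"""
    # The current query is left open after <|Bot|>: for the model to complete.
    prompt += f"""<|User|>:{query}<eoh>\n<|Bot|>:"""
    return prompt

print(format_prompt("What is 2 + 2?", history=[("Hi", "Hello!")]))
# <|User|>:Hi<eoh>
# <|Bot|>:Hello!<eoa>
# <|User|>:What is 2 + 2?<eoh>
# <|Bot|>:

If the BOS assumption needs checking against a concrete checkpoint (internlm/internlm-chat-7b is the likely home of this modeling file, though the commit itself does not name one), the first id of an encoded prompt should equal the tokenizer's bos_token_id:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("internlm/internlm-chat-7b", trust_remote_code=True)
enc = tok(["<|User|>:Hi<eoh>\n<|Bot|>:"], return_tensors="pt")
# Expect True if the tokenizer adds BOS automatically during encoding.
print(enc.input_ids[0, 0].item() == tok.bos_token_id)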