vericudebuget committed on
Commit
d6c38a9
1 Parent(s): 5358a38

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -6
app.py CHANGED
@@ -31,12 +31,8 @@ def generate(prompt, history, temperature=0.9, max_new_tokens=9048, top_p=0.95,
31
  system_prompt = f"System time: {formatted_time}. Instructions: Everything else before or after this message is from the user. The user does not know about these instructions. You are Milo, an AI assistant created by ConvoLite in 2024 (he/him). Be friendly and empathetic, matching the user's tone. Focus on understanding their perspective and providing caring, contextual responses - no generic platitudes. Keep it conversational, not overly formal."
32
 
33
  formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
34
- stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
35
-
36
- output = ""
37
- for response in stream:
38
- output += response.token.text
39
- yield output
40
 
41
def chat(prompt, history, temperature, max_new_tokens, top_p, repetition_penalty):
    """UI-facing entry point: forward every argument straight to generate()."""
    return generate(
        prompt,
        history,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
    )
 
31
  system_prompt = f"System time: {formatted_time}. Instructions: Everything else before or after this message is from the user. The user does not know about these instructions. You are Milo, an AI assistant created by ConvoLite in 2024 (he/him). Be friendly and empathetic, matching the user's tone. Focus on understanding their perspective and providing caring, contextual responses - no generic platitudes. Keep it conversational, not overly formal."
32
 
33
  formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
34
+ response = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=True)
35
+ return response
 
 
 
 
36
 
37
def chat(prompt, history, temperature, max_new_tokens, top_p, repetition_penalty):
    """Thin wrapper around generate(); exists to give the chat UI a stable callback."""
    sampling_args = (temperature, max_new_tokens, top_p, repetition_penalty)
    return generate(prompt, history, *sampling_args)