alfonsovelp committed
Commit 0c506f5
1 parent: b53a36f

Update app.py

Files changed (1): app.py (+1 / -1)
app.py CHANGED
@@ -38,7 +38,7 @@ def generate(
 
     inputs = tokenizer.apply_chat_template(formatted_prompt, return_tensors="pt").to("cuda")
 
-    strea = model.generate(inputs, , **generate_kwargs, stream=True, details=True, return_full_text=False)
+    strea = model.generate(inputs, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
 
     for response in stream:
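
The commit only drops the stray comma from the call. Note that the assignment still binds `strea` while the loop below iterates over `stream`, and `stream=True`, `details=True`, `return_full_text=False` look like `huggingface_hub` `InferenceClient.text_generation` options rather than keyword arguments accepted by `transformers`' `model.generate`. For local streaming with `transformers`, the usual pattern is a `TextIteratorStreamer` driven from a background thread. The sketch below is illustrative only and is not this Space's code; the model id, `messages`, and sampling settings are placeholder assumptions.

# Illustrative sketch only (not this Space's code): streaming generation with
# transformers' TextIteratorStreamer. The model id, messages, and
# generate_kwargs below are placeholder assumptions.
from threading import Thread

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

model_id = "mistralai/Mistral-7B-Instruct-v0.1"  # hypothetical choice of model
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16).to("cuda")

messages = [{"role": "user", "content": "Hello!"}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to("cuda")

# The streamer yields decoded text chunks as generate() produces tokens.
streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
generate_kwargs = dict(max_new_tokens=256, do_sample=True, temperature=0.7, top_p=0.95)

# generate() blocks until it finishes, so run it on a background thread and
# consume the stream on the main thread.
thread = Thread(
    target=model.generate,
    kwargs=dict(inputs=inputs, streamer=streamer, **generate_kwargs),
)
thread.start()

output = ""
for chunk in streamer:
    output += chunk
thread.join()
print(output)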