ShieldX committed on
Commit c15b189
1 Parent(s): 018fc10

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -24,7 +24,7 @@ def predict(message, history):

      inputs = tokenizer([messages], return_tensors="pt").to("cpu")

-     streamer = TextStreamer(tokenizer)
+     # streamer = TextStreamer(tokenizer)

      generation_config = GenerationConfig(
          penalty_alpha=0.6,
@@ -34,12 +34,12 @@ def predict(message, history):
          top_k=5,
          temperature=0.5,
          repetition_penalty=1.2,
-         max_new_tokens=64,
+         # max_new_tokens=64,
          eos_token_id=tokenizer.eos_token_id,
          pad_token_id=tokenizer.eos_token_id
      )

-     outputs = model.generate(**inputs,streamer=streamer, generation_config=generation_config)
+     outputs = model.generate(**inputs, generation_config=generation_config)
      response = tokenizer.decode(outputs[0], skip_special_tokens=True)
      return response[len(formatted_prompt(message)):]
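
For context, a minimal sketch of how predict might read after this commit. The checkpoint id, the formatted_prompt helper, the way `messages` is built, and any GenerationConfig arguments on the original lines 31-33 (between the two hunks) are not visible in this diff, so the versions below are assumptions and are marked as such in the comments.

    from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

    # Assumption: the actual checkpoint id is not visible in this diff; placeholder below.
    MODEL_ID = "your-model-id"
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID)

    def formatted_prompt(message):
        # Hypothetical helper; the real prompt template is defined elsewhere in app.py.
        return f"User: {message}\nAssistant:"

    def predict(message, history):
        # Assumption: `messages` is the formatted prompt for the current turn.
        messages = formatted_prompt(message)

        inputs = tokenizer([messages], return_tensors="pt").to("cpu")

        # streamer = TextStreamer(tokenizer)  # disabled by this commit

        # Arguments on the original lines 31-33 fall outside the diff hunks
        # and are therefore omitted here.
        generation_config = GenerationConfig(
            penalty_alpha=0.6,
            top_k=5,
            temperature=0.5,
            repetition_penalty=1.2,
            # max_new_tokens=64,  # cap removed by this commit
            eos_token_id=tokenizer.eos_token_id,
            pad_token_id=tokenizer.eos_token_id,
        )

        # Without a streamer, generate() returns the complete sequence in one call;
        # the prompt prefix is then stripped so only the reply is returned.
        outputs = model.generate(**inputs, generation_config=generation_config)
        response = tokenizer.decode(outputs[0], skip_special_tokens=True)
        return response[len(formatted_prompt(message)):]

If the rest of app.py wires this into a chat UI, it presumably does so with something like gr.ChatInterface(predict), but that part of the file is not shown in this commit.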