Add max tokens
Browse files
app.py
CHANGED
@@ -36,6 +36,7 @@ def predict(message, history, threshold, temperature):
         messages=history_openai_format,  # Chat history
         temperature=temperature,         # Temperature for text generation
         stream=True,                     # Stream response
+        max_tokens=512
     )
     print(stream)