Update app.py
app.py CHANGED
@@ -46,7 +46,7 @@ model = model.eval()
 def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
     print(history)
 
-    resp
+    for resp, history in model.stream_chat(
         tokenizer,
         query = message,
         history = history,
@@ -55,9 +55,9 @@ def stream_chat(message: str, history: list, temperature: float, max_new_tokens:
         top_p = top_p,
         top_k = top_k,
         temperature = temperature,
-    )
-
-
+    ):
+        print(resp)
+        yield resp
 
 
 chatbot = gr.Chatbot(height=600, placeholder=PLACEHOLDER)
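
The change turns stream_chat into a generator: model.stream_chat(...) is iterated and each partial resp is yielded, so Gradio streams the reply into the chat window as it is produced instead of waiting for the complete answer. Below is a minimal sketch of how such a generator is typically wired into gr.ChatInterface; the sliders, default values, and PLACEHOLDER text are assumptions for illustration, not part of this commit, and the call arguments on the file's lines 53-54 (hidden between the two hunks) are omitted.

import gradio as gr

# model and tokenizer are assumed to be loaded earlier in app.py
# (the first hunk's context line "model = model.eval()" refers to that model).
PLACEHOLDER = "Ask me anything"  # assumed placeholder text

def stream_chat(message: str, history: list, temperature: float,
                max_new_tokens: int, top_p: float, top_k: int, penalty: float):
    print(history)
    # model.stream_chat yields (partial_response, updated_history) pairs;
    # yielding each partial response lets Gradio redraw the reply as it grows.
    for resp, history in model.stream_chat(
        tokenizer,
        query=message,
        history=history,
        # arguments from the file's lines 53-54 are not visible in the diff
        top_p=top_p,
        top_k=top_k,
        temperature=temperature,
    ):
        print(resp)  # the commit also logs each partial response
        yield resp

chatbot = gr.Chatbot(height=600, placeholder=PLACEHOLDER)

demo = gr.ChatInterface(
    fn=stream_chat,        # a generator fn makes ChatInterface stream its output
    chatbot=chatbot,
    additional_inputs=[    # assumed controls matching stream_chat's signature
        gr.Slider(0.1, 1.0, value=0.8, label="temperature"),
        gr.Slider(128, 4096, value=1024, step=1, label="max_new_tokens"),
        gr.Slider(0.1, 1.0, value=0.8, label="top_p"),
        gr.Slider(1, 100, value=20, step=1, label="top_k"),
        gr.Slider(1.0, 2.0, value=1.2, label="penalty"),
    ],
)

if __name__ == "__main__":
    demo.launch()

Because each yield replaces the currently displayed assistant message, the loop only needs to yield the cumulative resp string that model.stream_chat returns; no manual token concatenation or queueing is required on the Gradio side.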