vericudebuget committed
Commit: 7339212
Parent(s): 18673a5

Update app.py
app.py
CHANGED
@@ -12,12 +12,12 @@ def format_prompt(message, history, system_prompt):
         prompt += f"[INST] {user_prompt} [/INST]"
         prompt += f" {bot_response} "
     prompt += f"[INST] {message} [/INST]"
-    # Add the system prompt
+    # Add the empty system prompt
     prompt = system_prompt + prompt
     return prompt
 
-def generate(prompt, history,
-    system_prompt = ''' '''
+def generate(prompt, history, temperature=0.9, max_new_tokens=9048, top_p=0.95, repetition_penalty=1.0):
+    system_prompt = ''' '''  # Empty system prompt
     temperature = float(temperature)
     if temperature < 1e-2:
         temperature = 1e-2
@@ -36,7 +36,7 @@ def generate(prompt, history, user_system_prompt, temperature=0.9, max_new_token
     formatted_time = now.strftime("%H.%M.%S, %B, %Y")
 
     # Load chat history from localStorage
-    loaded_history = gr.
+    loaded_history = gr.javascript.call('loadChat')
     history = loaded_history + history
 
     formatted_prompt = format_prompt(f"{prompt}", history, system_prompt)
@@ -49,12 +49,11 @@ def generate(prompt, history, user_system_prompt, temperature=0.9, max_new_token
 
     # Save the updated chat history to localStorage
     new_history = history + [(prompt, output)]
-    gr.
+    gr.javascript.call('saveChat', [new_history])
 
     return output
 
 additional_inputs = [
-    gr.Textbox(label="System Prompt", max_lines=1, interactive=True),
     gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05, interactive=True, info="Higher values produce more diverse outputs"),
     gr.Slider(label="Max new tokens", value=9048, minimum=256, maximum=9048, step=64, interactive=True, info="The maximum numbers of new tokens"),
     gr.Slider(label="Top-p (nucleus sampling)", value=0.90, minimum=0.0, maximum=1, step=0.05, interactive=True, info="Higher values sample more low-probability tokens"),
@@ -64,7 +63,7 @@ additional_inputs = [
 avatar_images = ("https://i.postimg.cc/pXjKKVXG/user-circle.png", "https://i.postimg.cc/qq04Yz93/CL3.png")
 
 with gr.Blocks() as demo:
-    chatbot = gr.Chatbot(value=gr.
+    chatbot = gr.Chatbot(value=gr.javascript.call('loadChat'))
     gr.ChatInterface(
         fn=generate,
         chatbot=gr.Chatbot(show_label=True, show_share_button=False, show_copy_button=True, likeable=True, layout="panel", height="auto", avatar_images=avatar_images),
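
Note on the persistence calls: gr.javascript.call does not appear to be part of Gradio's public Python API, so the loadChat/saveChat lines above are best read as placeholders for a browser-side storage hook. A minimal sketch that keeps the same load/save shape, but persists server-side to an assumed chat_history.json file (hypothetical path, not from the commit) instead of browser localStorage:

import json
from pathlib import Path

# Hypothetical on-disk stand-in for the browser localStorage referenced above.
HISTORY_FILE = Path("chat_history.json")

def load_chat() -> list:
    """Return the saved chat history as a list of (prompt, response) pairs."""
    if HISTORY_FILE.exists():
        # json stores pairs as lists; convert back to tuples for gr.Chatbot.
        return [tuple(pair) for pair in json.loads(HISTORY_FILE.read_text())]
    return []

def save_chat(history: list) -> None:
    """Persist the chat history so it survives a page reload or app restart."""
    HISTORY_FILE.write_text(json.dumps(history))

Inside generate, loaded_history = load_chat() and save_chat(new_history) would then take the place of the two gr.javascript.call lines; true per-browser storage would instead require custom JavaScript wired into the page.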