mou3az committed
Commit 12c861a
1 Parent(s): 13a09ca

Update app.py

Files changed (1)
  1. app.py +27 -26
app.py CHANGED

@@ -158,7 +158,7 @@ def normalize_text(text):
     return ' '.join(words)
 
 llm = HuggingFaceEndpoint(
-    repo_id="mistralai/Mistral-7B-Instruct-v0.2",
+    repo_id="HuggingFaceH4/starchat2-15b-v0.1",
     task="text-generation",
     max_new_tokens=4096,
     temperature=0.6,
@@ -194,8 +194,9 @@ def user3(user_message, history, file_path):
     return "", history + [[combined_message, None]], file_path
 
 messages1 = [
-    HumanMessage(content="You are a helpful assistant."),
-    AIMessage(content="Sure, how can I assist you today?")]
+    SystemMessage(content="You are a helpful assistant."),
+    HumanMessage(content="Hi AI, how are you today?"),
+    AIMessage(content="I'm great thank you. How can I help you?")]
 messages2 = messages1.copy()
 messages3 = messages1.copy()
 messages4 = messages1.copy()
@@ -213,10 +214,10 @@ def Chat_Message(history):
     messages1 = messages1[-8:]
 
     history[-1][1] = ""
-
-    history[-1][1] += response.content
-
-    return history
+    for character in response.content:
+        history[-1][1] += character
+        time.sleep(0.0025)
+        yield history
 
 def Web_Search(history):
     global messages2
@@ -235,7 +236,7 @@ def Web_Search(history):
     Query: {message}
     Search Content:
     {source_knowledge}
-
+
     #If the query is not related to specific Search Content, engage in general conversation or provide relevant information from other sources.
     """
 
@@ -248,10 +249,10 @@ def Web_Search(history):
     messages2 = messages2[-8:]
 
     history[-1][1] = ""
-
-    history[-1][1] += response.content
-
-    return history
+    for character in response.content:
+        history[-1][1] += character
+        time.sleep(0.0025)
+        yield history
 
 def Chart_Generator(history):
     global messages3
@@ -292,10 +293,10 @@ def Chart_Generator(history):
     combined_content=response.content
 
     history[-1][1] = ""
-
-    history[-1][1] += combined_content
-
-    return history
+    for character in combined_content:
+        history[-1][1] += character
+        time.sleep(0.0025)
+        yield history
 
 def Link_Scratch(history):
     global messages4
@@ -326,7 +327,7 @@ def Link_Scratch(history):
     Query: {user_message}
     Link Content:
     {content_data}
-
+
     #If the query is not related to specific Link Content, engage in general conversation or provide relevant information from other sources.
     """
     message = HumanMessage(content=augmented_prompt)
@@ -340,10 +341,10 @@ def Link_Scratch(history):
     response_message = response.content
 
     history[-1][1] = ""
-
-    history[-1][1] += response_message
-
-    return history
+    for character in response_message:
+        history[-1][1] += character
+        time.sleep(0.0025)
+        yield history
 
 def insert_line_breaks(text, every=8):
     return '\n'.join(text[i:i+every] for i in range(0, len(text), every))
@@ -384,7 +385,7 @@ def File_Interact(history,filepath):
     Query: {user_message}
     File Content:
     {content_data}
-
+
     #If the query is not related to specific File Content, engage in general conversation or provide relevant information from other sources.
     """
     message = HumanMessage(content=augmented_prompt)
@@ -398,10 +399,10 @@ def File_Interact(history,filepath):
     response_message = response.content
 
     history[-1][1] = ""
-
-    history[-1][1] += response_message
-
-    return history
+    for character in response_message:
+        history[-1][1] += character
+        time.sleep(0.0025)
+        yield history
 
 with gr.Blocks(theme=gr.themes.Soft()) as demo:
     with gr.Row():
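For context, below is a minimal sketch of the character-streaming pattern this commit introduces, collapsed into a single runnable snippet. It assumes the Space wraps the HuggingFaceEndpoint in a chat interface such as ChatHuggingFace and uses Gradio's list-of-pairs chat history; the import paths and the chat wrapper are assumptions for illustration, not taken from app.py.

import time

from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage

llm = HuggingFaceEndpoint(
    repo_id="HuggingFaceH4/starchat2-15b-v0.1",   # model selected in this commit
    task="text-generation",
    max_new_tokens=4096,
    temperature=0.6,
)
chat = ChatHuggingFace(llm=llm)  # assumption: a chat wrapper is defined elsewhere in app.py

# Seed conversation, mirroring the new messages1 initialisation in this commit.
messages1 = [
    SystemMessage(content="You are a helpful assistant."),
    HumanMessage(content="Hi AI, how are you today?"),
    AIMessage(content="I'm great thank you. How can I help you?"),
]

def Chat_Message(history):
    """Generator version: yields partial history so Gradio streams the reply."""
    global messages1
    messages1.append(HumanMessage(content=history[-1][0]))
    response = chat.invoke(messages1)
    messages1.append(AIMessage(content=response.content))
    messages1 = messages1[-8:]           # keep only the most recent context

    history[-1][1] = ""
    for character in response.content:   # emit one character at a time
        history[-1][1] += character
        time.sleep(0.0025)               # small delay makes the stream visible
        yield history

Because the callbacks are now generators that yield the growing history instead of returning it once, Gradio streams each partial update to the Chatbot component, which is the visible effect of the +27 -26 change in this commit.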