mou3az committed
Commit 67588de
Parent: 90b4e42

Update app.py

Files changed (1)
  1. app.py +17 -14
app.py CHANGED
@@ -243,11 +243,12 @@ def Web_Search(history):
 
     msg=HumanMessage(content=augmented_prompt)
     messages2.append(msg)
-    response = chat_model.invoke(messages2)
-    messages2.append(AIMessage(content=response.content))
 
     if len(messages2) >= 8:
-        messages2 = messages2[-8:]
+        messages2 = messages2[-8:]
+
+    response = chat_model.invoke(messages2)
+    messages2.append(AIMessage(content=response.content))
 
     history[-1][1] = ""
     for character in response.content:
@@ -270,13 +271,13 @@ def Chart_Generator(history):
 
             prompt = HumanMessage(content=message_with_description)
             messages3.append(prompt)
+
+            if len(messages3) >= 8:
+                messages3 = messages3[-8:]
 
             response = chat_model.invoke(messages3)
             messages3.append(AIMessage(content=response.content))
 
-            if len(messages3) >= 8:
-                messages3 = messages3[-8:]
-
             combined_content = f'{image_html}<br>{response.content}'
         else:
             response_text = "Can't generate this image. Please provide valid chart details."
@@ -284,12 +285,12 @@ def Chart_Generator(history):
     else:
         prompt = HumanMessage(content=message)
         messages3.append(prompt)
+
+        if len(messages3) >= 8:
+            messages3 = messages3[-8:]
 
         response = chat_model.invoke(messages3)
         messages3.append(AIMessage(content=response.content))
-
-        if len(messages3) >= 8:
-            messages3 = messages3[-8:]
 
         combined_content=response.content
 
@@ -333,12 +334,13 @@ def Link_Scratch(history):
     """
     message = HumanMessage(content=augmented_prompt)
     messages4.append(message)
+
+    if len(messages4) >= 2:
+        messages4 = messages4[-2:]
+
     response = chat_model.invoke(messages4)
     messages4.append(AIMessage(content=response.content))
 
-    if len(messages4) >= 1:
-        messages4 = messages4[-1:]
-
     response_message = response.content
 
     history[-1][1] = ""
@@ -391,11 +393,12 @@ def File_Interact(history,filepath):
     """
     message = HumanMessage(content=augmented_prompt)
     messages5.append(message)
-    response = chat_model.invoke(messages5)
-    messages5.append(AIMessage(content=response.content))
 
     if len(messages5) >= 1:
         messages5 = messages5[-1:]
+
+    response = chat_model.invoke(messages5)
+    messages5.append(AIMessage(content=response.content))
 
     response_message = response.content
 
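Every hunk applies the same fix: the message list is trimmed to its rolling window before chat_model.invoke is called, and the AIMessage reply is appended afterwards, so the model always receives a bounded prompt. Below is a minimal sketch of that pattern, assuming the LangChain message classes and .invoke() interface used in app.py; the helper name ask_with_window and its history/user_text/window parameters are illustrative and not taken from the app.

from langchain_core.messages import AIMessage, HumanMessage

def ask_with_window(chat_model, history, user_text, window=8):
    # Record the user's turn first, as the updated branches do.
    history.append(HumanMessage(content=user_text))

    # Trim *before* invoking, so the model sees at most `window` messages.
    # The commit uses a window of 8 for messages2/messages3, 2 for messages4,
    # and 1 for messages5.
    if len(history) >= window:
        del history[:-window]

    # Invoke on the trimmed window and keep the reply for the next turn.
    response = chat_model.invoke(history)
    history.append(AIMessage(content=response.content))
    return response.content

The in-place del is only so the helper works on a caller-owned list; app.py rebinds its messages* lists directly (e.g. messages2 = messages2[-8:]), which produces the same bounded window.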