Update app.py
app.py CHANGED

@@ -215,7 +215,7 @@ def Chat_Message(history, messages1):
         response = chat_model.invoke(messages1)
     except Exception as e:
         error_message = str(e)
-
+        start_index = error_message.find("Input validation error:")
         end_index = error_message.find("and 4096 max_new_tokens\nMake sure 'text-generation' task is supported by the model.")
         if start_index != -1 and end_index != -1:
             raise gr.Error(error_message[start_index:end_index].strip()) from e
@@ -297,7 +297,7 @@ def Chart_Generator(history, messages3):
         response = chat_model.invoke(messages3)
     except Exception as e:
         error_message = str(e)
-
+        start_index = error_message.find("Input validation error:")
         end_index = error_message.find("and 4096 max_new_tokens\nMake sure 'text-generation' task is supported by the model.")
         if start_index != -1 and end_index != -1:
             raise gr.Error(error_message[start_index:end_index].strip()) from e
@@ -321,7 +321,7 @@ def Chart_Generator(history, messages3):
         response = chat_model.invoke(messages3)
     except Exception as e:
         error_message = str(e)
-
+        start_index = error_message.find("Input validation error:")
         end_index = error_message.find("and 4096 max_new_tokens\nMake sure 'text-generation' task is supported by the model.")
         if start_index != -1 and end_index != -1:
             raise gr.Error(error_message[start_index:end_index].strip()) from e
@@ -379,7 +379,7 @@ def Link_Scratch(history, messages4):
         response = chat_model.invoke(messages4)
     except Exception as e:
         error_message = str(e)
-
+        start_index = error_message.find("Input validation error:")
         end_index = error_message.find("and 4096 max_new_tokens\nMake sure 'text-generation' task is supported by the model.")
         if start_index != -1 and end_index != -1:
             raise gr.Error(error_message[start_index:end_index].strip()) from e
@@ -447,7 +447,7 @@ def File_Interact(history, filepath, messages5):
         response = chat_model.invoke(messages5)
     except Exception as e:
         error_message = str(e)
-
+        start_index = error_message.find("Input validation error:")
         end_index = error_message.find("and 4096 max_new_tokens\nMake sure 'text-generation' task is supported by the model.")
         if start_index != -1 and end_index != -1:
             raise gr.Error(error_message[start_index:end_index].strip()) from e
@@ -495,7 +495,7 @@ def Explore_WebSite(history, messages6):
         response = chat_model.invoke(messages6)
     except Exception as e:
         error_message = str(e)
-
+        start_index = error_message.find("Input validation error:")
         end_index = error_message.find("and 4096 max_new_tokens\nMake sure 'text-generation' task is supported by the model.")
         if start_index != -1 and end_index != -1:
             raise gr.Error(error_message[start_index:end_index].strip()) from e
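The change is identical in all six handlers: a blank line in the except block is replaced with an assignment that locates the start of the provider's "Input validation error:" message, so the slice passed to gr.Error has a defined starting point. As a minimal sketch (not the actual app.py code), here is how one handler's error handling reads after the change, assuming gradio is imported as gr and chat_model is the chat model already configured elsewhere in app.py; the surrounding function body is illustrative:

import gradio as gr

# chat_model is assumed to be configured earlier in app.py.

def Chat_Message(history, messages1):
    try:
        response = chat_model.invoke(messages1)
    except Exception as e:
        error_message = str(e)
        # Trim the raw backend error down to the "Input validation error: ..." span
        # that precedes the max_new_tokens hint, and surface only that in the UI.
        start_index = error_message.find("Input validation error:")
        end_index = error_message.find("and 4096 max_new_tokens\nMake sure 'text-generation' task is supported by the model.")
        if start_index != -1 and end_index != -1:
            raise gr.Error(error_message[start_index:end_index].strip()) from e
        raise  # assumption: errors that don't match the pattern propagate unchanged
    ...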