# Spaces:
# Runtime error
# Runtime error
import gradio as gr
def chat(user_input, history=None, model=None):
    """Generate a chatbot reply and append the exchange to the history.

    Args:
        user_input: The user's message as a string.
        history: Optional list of (user_message, model_response) tuples.
            Defaults to None and a fresh list is created per call — using
            a literal ``[]`` default would be shared across calls and grow
            forever (the mutable-default pitfall).
        model: Optional callable used to generate the reply. Falls back to
            the module-level ``chatbot_model`` loaded at startup; the
            original ``gr.get("chatbot_model")`` is not a Gradio API.

    Returns:
        A ``(response, history)`` tuple, with history capped so memory
        stays bounded across a long conversation.
    """
    if history is None:
        history = []
    # Trim before generating so the conversation list stays bounded.
    if len(history) > 10:
        history = history[-5:]  # keep only the 5 most recent exchanges
    if model is None:
        model = chatbot_model  # module-level model loaded at import time
    # NOTE(review): assumes the model returns an HF-pipeline-style
    # [{'generated_text': ...}] payload — confirm against gr.load's output.
    response = model(user_input, max_length=50, do_sample=True)[0]['generated_text']
    # Record the exchange as a single (user, bot) pair — the format Gradio
    # chat components expect — instead of two half-filled tuples.
    history.append((user_input, response))
    return response, history
# Load the Hugging Face model once at startup. Start from a None sentinel
# so a failed load leaves chatbot_model unset and the launch guard below
# can report the problem instead of crashing with a NameError.
chatbot_model = None
try:
    chatbot_model = gr.load("models/lucas-w/mental-health-chatbot-3")
except Exception as e:
    print(f"Error loading model: {e}")
# Launch the Gradio interface only when the model loaded successfully.
if chatbot_model is not None:
    interface = gr.Interface(
        fn=chat,
        # chat() takes (user_input, history) and returns (response, history),
        # so the interface needs a matching "state" input/output pair —
        # a single textbox in/out mismatches the signature and errors at
        # request time.
        inputs=["textbox", "state"],
        outputs=["textbox", "state"],
        # Dropped interpretation="chat" ("chat" is not a valid interpretation
        # value) and elem_id (not a gr.Interface kwarg) — both fail at
        # construction. Fixed the stray ")" in the description text.
        title="Mental Health Chatbot",
        description="Talk to a mental health assistant",
        css="""
        #chat-container {
            height: 400px;
            overflow-y: scroll;
        }
        """
    )
    interface.launch()
else:
    print("Failed to launch chatbot. Please check model availability and error messages.")