# Import Gradio for UI, along with other necessary libraries
import gradio as gr
from rag_app.loading_data.load_S3_vector_stores import get_chroma_vs
from rag_app.agents.react_agent import agent_executor
from config import db

get_chroma_vs()

if __name__ == "__main__":

    # Function to add a new input to the chat history
    def add_text(history, text):
        # Append the new text to the history with a placeholder for the response
        history = history + [(text, None)]
        return history, ""

    # Function representing the bot's response mechanism
    def bot(history):
        # Obtain the response from the 'infer' function using the latest input
        response = infer(history[-1][0], history)
        print(response)
        history[-1][1] = response['output']
        return history

    # Function to infer the response using the RAG model
    def infer(question, history):
        # Use the question and history to query the RAG model
        #result = qa({"query": question, "history": history, "question": question})
        try:
            result = agent_executor.invoke(
                {
                    "input": question,
                    "chat_history": history
                }
            )
            return result
        except Exception:
            # gr.Error is an exception and surfaces the message in the UI;
            # gr.Warning is a plain notification and cannot be raised.
            raise gr.Error("Model is overloaded, please try again in a few minutes!")

    def vote(data: gr.LikeData):
        if data.liked:
            print("You upvoted this response: ")
        else:
            print("You downvoted this response: ")

    def get_examples(input_text: str):
        tmp_history = [(input_text, None)]
        response = infer(input_text, tmp_history)
        return response['output']

    # CSS styling for the Gradio interface
    css = """
    #col-container {max-width: 1200px; margin-left: auto; margin-right: auto;}
    """
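    # Illustrative sketch only (an assumption, not the app's actual UI code,
    # which is expected to follow further below): the callbacks above are
    # typically chained in a gr.Blocks layout, with add_text capturing the
    # prompt and bot filling in the agent's answer, roughly like:
    #
    #   with gr.Blocks(css=css) as demo:
    #       chatbot = gr.Chatbot()
    #       prompt = gr.Textbox()
    #       prompt.submit(add_text, [chatbot, prompt], [chatbot, prompt]).then(
    #           bot, chatbot, chatbot
    #       )
    #       chatbot.like(vote, None, None)
    #   demo.launch()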
    # HTML content for the Gradio interface title
    title = """
    Hello, I am BotTina 2.0, your intelligent AI assistant. I can help you explore Wuerttembergische Versicherung's products.