# Import Gradio for UI, along with other necessary libraries
import gradio as gr
from rag_app.agents.react_agent import agent_executor  # need to import the qa!


# Function to add a new input to the chat history
def add_text(history, text):
    # Append the new text to the history with a placeholder for the response
    history = history + [(text, None)]
    return history, ""


# Function representing the bot's response mechanism
def bot(history):
    # Obtain the response from the 'infer' function using the latest input
    response = infer(history[-1][0], history)
    # sources = [doc.metadata.get("source") for doc in response['source_documents']]
    # src_list = '\n'.join(sources)
    # print_this = response['result'] + "\n\n\n Sources: \n\n\n" + src_list
    # history[-1][1] = print_this  # response['answer']
    # Update the history with the bot's response
    history[-1][1] = response['output']
    return history


# Function to infer the response using the RAG agent
def infer(question, history):
    # Use the question and history to query the RAG agent
    # result = qa({"query": question, "history": history, "question": question})
    try:
        result = agent_executor.invoke(
            {
                "input": question,
                "chat_history": history,
            }
        )
        return result
    except Exception as e:
        # Surface a user-friendly error in the UI while preserving the original traceback
        raise gr.Error("The model is overloaded, please retry later!") from e


# CSS styling for the Gradio interface
css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""
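# Illustrative only: a minimal sketch (kept commented out) of how add_text and bot
# are typically chained in a gr.Blocks UI via submit().then(). The component names,
# placeholder text, and layout below are assumptions, not this app's actual wiring.
#
# with gr.Blocks(css=css) as demo:
#     chatbot = gr.Chatbot([], elem_id="chatbot")
#     prompt = gr.Textbox(placeholder="Ask a question...")
#     prompt.submit(add_text, [chatbot, prompt], [chatbot, prompt]).then(
#         bot, chatbot, chatbot
#     )
# demo.launch()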
# HTML content for the Gradio interface title
title = """
Hello, I am BotTina 2.0, your intelligent AI assistant. I can help you explore Wuerttembergische Versicherung's products.