from InnovationHub.llm.vector_store import *
from InnovationHub.llm.chain import *
import os
import pprint
import codecs
import chardet
import gradio as gr
from langchain.llms import HuggingFacePipeline
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain import OpenAI, ConversationChain, LLMChain, PromptTemplate
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from EdgeGPT import Chatbot

"""
# Create the vector index (one-off step; the result is persisted to db_path)
db_path = "./data/s-class-manual"
embeddings = HuggingFaceEmbeddings()
index = FAISS.from_documents(docs, embeddings)
index.save_local(db_path)
"""


# Launch the Gradio UI
def start_gradio():
    chatbot_interface = gr.Interface(
        fn=chatbot,  # `chatbot` is expected to come from the InnovationHub.llm imports above
        inputs=[
            "text",
            gr.inputs.Checkbox(label="Create bot"),
            gr.inputs.Slider(minimum=1, maximum=10, step=1, label="k"),
        ],
        outputs="text",
        title="Owner's Manual",
        description="Ask your vehicle manual and get a response.",
        examples=[
            ["What are the different features of the dashboard console?", True, 2],
            ["What do they do?", False, 3],
        ],
    )
    chatbot_interface.launch()


if __name__ == '__main__':
    start_gradio()
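

# --- Illustrative sketch (not part of the original module) ---
# Shows how the FAISS index persisted above could be loaded and queried with a
# plain similarity search; `query_manual` and its parameters are hypothetical
# and assume the index was saved with `index.save_local(db_path)`.
def query_manual(question, db_path="./data/s-class-manual", k=3):
    embeddings = HuggingFaceEmbeddings()
    index = FAISS.load_local(db_path, embeddings)
    # Retrieve the k passages of the manual most similar to the question.
    docs = index.similarity_search(question, k=k)
    return "\n\n".join(doc.page_content for doc in docs)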