import os
import pprint
import codecs
import chardet
import gradio as gr
from langchain.llms import HuggingFacePipeline
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain import OpenAI, ConversationChain, LLMChain, PromptTemplate
from langchain.chains.conversation.memory import ConversationalBufferWindowMemory
from EdgeGPT import Chatbot
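# Module-level setup: load the FAISS index built from the S-Class owner's manual
# and create the Bing (EdgeGPT) chatbot. The COOKIES environment variable supplies
# the Bing session cookies expected by EdgeGPT.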
db_path = 'data/s-class-manual'
cookies = os.environ['COOKIES']
embeddings = HuggingFaceEmbeddings()
index = FAISS.load_local(folder_path=db_path, embeddings=embeddings)
bot = Chatbot(cookies=cookies)
chatgpt_chain = None  # created lazily by ask_question() when the 'gpt3' backend is requested
def init_chain():
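    """Build an LLMChain around OpenAI with a short conversational window memory.

    Sends one priming instruction so the model answers as a Mercedes-Benz
    voice assistant, then returns the chain.
    """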
    template = """
{history}
Human: {human_input}
Assistant:"""
    prompt = PromptTemplate(
        input_variables=["history", "human_input"],
        template=template
    )
    chatgpt_chain = LLMChain(
        llm=OpenAI(temperature=0),
        prompt=prompt,
        verbose=True,
        memory=ConversationalBufferWindowMemory(k=2),
    )
    human_input = """I want you to act as a voice assistant for a Mercedes-Benz vehicle. I will provide you with excerpts from a vehicle manual. You must use the excerpts to answer the user question as best as you can. If you are unsure about the answer, you will truthfully say "not sure"."""
    bot_response = chatgpt_chain.predict(human_input=human_input)
    print(bot_response)
    return chatgpt_chain
def get_prompt(question, index, k=4):
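    """Retrieve the k most similar manual excerpts for `question` from the
    FAISS index and assemble them into a single grounded prompt string."""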
    prompt = """I need information from my vehicle manual. I will provide an [EXCERPT] from the manual. Use the [EXCERPT] and nothing else to answer the [QUESTION]. You must refer to the "[EXCERPT]" as "S-Class Manual" in your response. Here is the [EXCERPT]:"""
    similar_docs = index.similarity_search(query=question, k=k)
    context = []
    for d in similar_docs:
        content = d.page_content
        context.append(content)
    user_input = prompt + '\n[EXCERPT]' + '\n' + \
        '\n'.join(context[:k]) + '\n' + '[QUESTION]\n' + question
    return user_input
async def ask_question(question, index, backend='bing', k=2, create_bot=False):
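    """Answer `question` using manual excerpts retrieved from `index`.

    backend='bing' routes the prompt through EdgeGPT; backend='gpt3' routes it
    through the LLMChain built by init_chain(). Set create_bot=True to force a
    fresh EdgeGPT session.
    """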
    global bot, chatgpt_chain
    if bot is None or create_bot:
        # Recreate the EdgeGPT session with the same cookies used at start-up.
        bot = Chatbot(cookies=cookies)
    if backend == 'bing':
        prompt = get_prompt(question=question, index=index, k=k)
        response = (await bot.ask(prompt=prompt))["item"]["messages"][1]["adaptiveCards"][0]["body"][0]["text"]
    elif backend == 'gpt3':
        if chatgpt_chain is None:
            chatgpt_chain = init_chain()
        prompt = get_prompt(question=question, index=index, k=k)
        response = chatgpt_chain.predict(human_input=prompt)
    else:
        raise ValueError(f"Invalid backend specified: {backend}")
    return response
async def chatbot(question, create_bot=False, k=2):
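    """Gradio callback: answer a question against the loaded manual index using the Bing backend."""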
    response = await ask_question(question=question, index=index, backend='bing', k=k, create_bot=create_bot)
    return response
def start_ui():
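    """Launch the Gradio interface for the owner's-manual chatbot."""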
    chatbot_interface = gr.Interface(
        fn=chatbot,
        inputs=["text", gr.inputs.Checkbox(label="Create bot"), gr.inputs.Slider(
            minimum=1, maximum=10, step=1, label="k")],
        outputs="text",
        title="Owner's Manual",
        description="Ask a question about your vehicle manual and get a response.",
        examples=[
            ["What are the different features of the dashboard console?", True, 2],
            ["What do they do?", False, 3]
        ]
    )
    chatbot_interface.launch()
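# Entry-point sketch (assumption: this module may also be run directly; in the
# Space it is more likely imported and started from a separate launcher such as app.py).
if __name__ == '__main__':
    start_ui()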