add chatbot functionality
app.py
CHANGED
@@ -9,6 +9,8 @@ from langchain.prompts import PromptTemplate
 from langchain.chains.question_answering import load_qa_chain
 import streamlit as st
 
+from config import *
+
 confluence_api_key = os.environ["CONFLUENCE_API_KEY"]
 
 if "GOOGLE_API_KEY" not in os.environ:
@@ -34,27 +36,43 @@ llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash-latest")
 vector_store = FAISS.from_texts(chunks, embedding=embeddings)
 vector_store.save_local("faiss_index")
 
-def get_response(query):
+#chat_history = []
+
+def get_response(query, chat_history):
     prompt_template = """
-    Answer the question as detailed as possible
-    provided context just say, "
+    Answer the question as detailed as possible based on the conversation history and the provided context, make sure to provide all the details, if the answer is not in
+    provided context just say, "I am not able to help. Please contact Platform Support Team at [email protected]", don't provide the wrong answer\n\n
+    Conversation History:\n {chat_history}\n
     Context:\n {context}?\n
     Question: \n{question}\n
 
     Answer:
     """
-    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
+    prompt = PromptTemplate(template=prompt_template, input_variables=["chat_history", "context", "question"])
     chain = load_qa_chain(llm, chain_type="stuff", prompt=prompt)
     db = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
     docs = db.similarity_search(query)
-    response = chain({"input_documents" : docs, "question": query}, return_only_outputs = True)
+    response = chain({"input_documents" : docs, "question": query, "chat_history": chat_history}, return_only_outputs = True)
    return response["output_text"]
 
 
 if __name__ == '__main__':
     st.set_page_config("Chat with Confluence Page")
     st.header("Chat with Confluence Page using AI")
+
+    if "messages" not in st.session_state:
+        st.session_state.messages = []
 
-
-
-
+    for message in st.session_state.messages:
+        with st.chat_message(message["role"]):
+            st.markdown(message["content"])
+
+    if question := st.chat_input("Ask questions related to login and registration"):
+        st.session_state.messages.append({"role": "user", "content": question})
+        with st.chat_message("user"):
+            st.markdown(question)
+
+        with st.chat_message("assistant"):
+            answer = get_response(question, st.session_state.messages)
+            st.write(answer)
+            st.session_state.messages.append({"role": "assistant", "content": answer})
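The new `{chat_history}` placeholder receives `st.session_state.messages` as-is, so `PromptTemplate` interpolates the Python repr of a list of dicts into the prompt. A minimal sketch of what the model actually sees; `format_history` and the sample turns are hypothetical, not part of the commit, and show one way the history could be rendered more readably:

```python
from langchain.prompts import PromptTemplate

prompt = PromptTemplate(
    template="Conversation History:\n{chat_history}\n\nContext:\n{context}\n\nQuestion:\n{question}\n\nAnswer:",
    input_variables=["chat_history", "context", "question"],
)

# Hypothetical helper: renders turns as "role: content" lines instead of the
# raw list-of-dicts repr that passing st.session_state.messages produces.
def format_history(messages):
    return "\n".join(f"{m['role']}: {m['content']}" for m in messages)

history = [
    {"role": "user", "content": "How do I register a new account?"},
    {"role": "assistant", "content": "Use the Sign Up link on the login page."},
]

print(prompt.format(
    chat_history=format_history(history),
    context="(retrieved Confluence chunks)",
    question="Does that also work for SSO users?",
))
```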
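Why adding an extra `"chat_history"` key to the chain call works: with `chain_type="stuff"`, `load_qa_chain` fills `{context}` from `input_documents` and forwards the remaining input keys to the prompt, so any variable named in `input_variables` can be supplied at call time. A runnable sketch under that assumption, with a fake LLM standing in for the Gemini model (exact import paths vary across classic LangChain releases):

```python
from langchain.llms.fake import FakeListLLM          # path may differ by release
from langchain.prompts import PromptTemplate
from langchain.schema import Document
from langchain.chains.question_answering import load_qa_chain

prompt = PromptTemplate(
    template="History: {chat_history}\nContext: {context}\nQ: {question}\nA:",
    input_variables=["chat_history", "context", "question"],
)

# FakeListLLM returns canned responses, so no API key is needed to try this.
llm = FakeListLLM(responses=["Password reset is under Login > Forgot password."])
chain = load_qa_chain(llm, chain_type="stuff", prompt=prompt)

# "input_documents" is stuffed into {context}; the other keys map straight
# onto the prompt's remaining input_variables.
result = chain(
    {
        "input_documents": [Document(page_content="Reset instructions live on the Login page.")],
        "question": "How do I reset my password?",
        "chat_history": "user: hi\nassistant: hello",
    },
    return_only_outputs=True,
)
print(result["output_text"])
```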