gkrthk committed · Commit 7e976dc · 1 Parent(s): 33fe60d

add llmchain

Files changed:
- app.py +17 -16
- confluence_qa.py +4 -4
app.py CHANGED
@@ -41,23 +41,24 @@ with st.sidebar.form(key ='Form1'):
                                type="password")
     submitted1 = st.form_submit_button(label='Submit')
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+if submitted1 and confluence_url and space_key:
+    st.session_state["config"] = {
+        "persist_directory": None,
+        "confluence_url": confluence_url,
+        "username": username if username != "" else None,
+        "api_key": api_key if api_key != "" else None,
+        "space_key": space_key,
+        "include_attachment": True
+    }
+    with st.spinner(text="Ingesting Confluence..."):
+        ### Hardcoding for https://templates.atlassian.net/wiki/ and space RD to avoid multiple OpenAI calls.
+        config = st.session_state["config"]
+        if config["confluence_url"] == "https://templates.atlassian.net/wiki/" and config["space_key"] =="RD":
+            config["persist_directory"] = "chroma_db"
+            st.session_state["config"] = config
 
-
-
+        st.session_state["confluence_qa"] = load_confluence(st.session_state["config"])
+        st.write("Confluence Space Ingested")
 
 
 st.title("Confluence Q&A Demo")
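Note: load_confluence is called above but is not part of this diff. A minimal sketch of what such a helper could look like, assuming it is a cached wrapper that feeds the sidebar config into the ConfluenceQA class from confluence_qa.py; the constructor signature, the st.cache_resource decorator, and the method order are assumptions, not taken from this commit (app.py builds a plain dict while confluence_qa.py reads attributes such as config.url, so the exact adapter is unknown):

import streamlit as st
from confluence_qa import ConfluenceQA

@st.cache_resource  # assumption: cache the ingestion so Streamlit reruns reuse the same QA object
def load_confluence(config: dict) -> ConfluenceQA:
    # Assumed wiring: build the QA object from the sidebar config, then run the
    # steps shown in confluence_qa.py (embeddings -> vector store -> QA chain).
    qa = ConfluenceQA(config=config)
    qa.init_embeddings()
    qa.store_in_vector_db()
    qa.retrieve_qa_chain()
    return qa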
confluence_qa.py CHANGED
@@ -5,7 +5,7 @@ from langchain import HuggingFacePipeline
 from langchain.prompts import PromptTemplate
 from langchain.chains import RetrievalQA
 from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.vectorstores import
+from langchain.vectorstores import Chroma
 
 class ConfluenceQA:
     def init_embeddings(self) -> None:
@@ -20,14 +20,14 @@ class ConfluenceQA:
     def store_in_vector_db(self) -> None:
         config = self.config
         loader = ConfluenceLoader(
-            url=config.url, username=config.username, api_key=config.
+            url=config.url, username=config.username, api_key=config.api_key
         )
-        documents = loader.load(include_attachments=config.includeAttachements, limit=50,
+        documents = loader.load(include_attachments=config.includeAttachements, limit=50, space_key=config.space_key)
         text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=150)
         documents = text_splitter.split_documents(documents)
         # text_splitter = TokenTextSplitter(chunk_size=1000, chunk_overlap=10) # This the encoding for text-embedding-ada-002
         # texts = text_splitter.split_documents(texts)
-        self.db =
+        self.db = Chroma.from_documents(documents, self.embeddings)
 
     def retrieve_qa_chain(self) -> None:
         template = """Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer. Keep the answer as concise as possible.
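The body of retrieve_qa_chain is only partially visible in this hunk. For reference, a minimal sketch of how the pieces completed above (HuggingFaceEmbeddings, Chroma.from_documents, and the prompt template) are typically combined into a RetrievalQA chain in this version of LangChain; build_qa_chain, the llm argument, and the "stuff" chain type are illustrative assumptions, not code from this repository:

from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate

def build_qa_chain(llm, db, template: str) -> RetrievalQA:
    # Wrap the prompt template and the Chroma store (db) built in store_in_vector_db
    # into a retrieval chain that stuffs the top matching chunks into the prompt.
    prompt = PromptTemplate(template=template, input_variables=["context", "question"])
    return RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=db.as_retriever(),
        chain_type_kwargs={"prompt": prompt},
    )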