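"""
Streamlit front end for a Document Q&A bot.

The app lets a user upload a PDF/DOC/DOCX/TXT file, indexes it into a vector
store via the local `llm_model` module, and answers questions about the
document with a retrieval QA chain, showing the answer next to its sources.

Assumed entry point (the file name is an assumption): `streamlit run app.py`
"""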
import os
import shutil

import requests
import fitz  # PyMuPDF
import streamlit as st
from streamlit_lottie import st_lottie

import sidebar
import llm_model
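# `sidebar` and `llm_model` are local project modules. From their use below,
# they are expected to expose sidebar(), load_model_params(),
# create_vector_db(file_path, embeddings) and document_parser(embeddings, llm);
# these signatures are inferred from this file, not confirmed from their source.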
def index_document(uploaded_file):
    """Persist the uploaded file under assets/ and index it into the vector DB."""
    if uploaded_file is None:
        return None

    # Folder inside 'assets' where uploaded files are stored
    assets_folder = "assets/uploaded_files"
    if not os.path.exists(assets_folder):
        os.makedirs(assets_folder)

    # Save the uploaded file to the assets folder
    file_path = os.path.join(assets_folder, uploaded_file.name)
    with open(file_path, "wb") as f:
        f.write(uploaded_file.getvalue())

    st.success(f"File '{file_path}' uploaded!")

    # Relies on the module-level `instructor_embeddings` created by load_model()
    with st.spinner("Indexing document... This is a free CPU version and may take a while ⏳"):
        llm_model.create_vector_db(file_path, instructor_embeddings)

    return file_path
def load_lottieurl(url: str):
    """Fetch a Lottie animation as JSON; return None if the request fails."""
    try:
        r = requests.get(url, timeout=10)
    except requests.RequestException:
        return None
    if r.status_code != 200:
        return None
    return r.json()
def is_query_valid(query: str) -> bool:
    if not query:
        st.error("Please enter a question!")
        return False
    return True
# Load model parameters once per session; cached so Streamlit reruns
# (triggered by every widget interaction) do not reload the LLM and embeddings.
# Assumes a Streamlit version that provides st.cache_resource (>= 1.18).
@st.cache_resource
def load_model():
    return llm_model.load_model_params()
st.set_page_config(page_title="Document QA Bot")

lottie_book = load_lottieurl("https://assets4.lottiefiles.com/temp/lf20_aKAfIn.json")
if lottie_book:
    st_lottie(lottie_book, speed=1, height=200, key="initial")

# Place the title below the Lottie animation
st.title("Document Q&A Bot 🤖")

# Left sidebar
sidebar.sidebar()
# st.sidebar.header("Upload PDF")

# Load model parameters
llm, instructor_embeddings = load_model()
# Upload file through Streamlit
uploaded_file = st.file_uploader("Upload a file", type=["pdf", "doc", "docx", "txt"])
filename = index_document(uploaded_file)
print(filename)

if not filename:
    st.stop()
with st.form(key="qa_form"):
    query = st.text_area("Ask a question about the document")
    submit = st.form_submit_button("Submit")

if submit:
    if not is_query_valid(query):
        st.stop()

    # Output columns
    answer_col, sources_col = st.columns(2)

    qa_chain = llm_model.document_parser(instructor_embeddings, llm)
    result = qa_chain(query)

    with answer_col:
        st.markdown("#### Answer")
        st.markdown(result["result"])

    with sources_col:
        st.markdown("#### Sources")
        # Only show sources when the model actually found an answer
        if "i don't know" not in result["result"].lower():
            for source in result["source_documents"]:
                st.markdown(source.page_content)
                st.markdown(source.metadata["source"])
                st.markdown("--------------------------")