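"""Streamlit app: chat with an OpenAI chat model, optionally appending the
contents of an uploaded XML/JSON/HTML/text file to the prompt, and save each
prompt/response pair to a downloadable file listed in the sidebar."""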
import streamlit as st
import openai
import os
import base64
import glob
from datetime import datetime
from xml.etree import ElementTree as ET
from bs4 import BeautifulSoup
import json
# from dotenv import load_dotenv
# load_dotenv()
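# The API key is read from the OPENAI_KEY environment variable
# (uncomment the dotenv lines above to load it from a local .env file instead).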
openai.api_key = os.getenv('OPENAI_KEY')
def chat_with_model(prompts):
    """Send the accumulated prompts to the chat model and return its reply text."""
    model = "gpt-3.5-turbo"
    #model = "gpt-4-32k"  # 32k tokens shared between prompt and completion
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']
def generate_filename(prompt):
    """Build a timestamped, filesystem-safe filename from the prompt."""
    #safe_date_time = datetime.now().strftime("%m%d_%H%M")
    safe_date_time = datetime.now().strftime("%m%d_%I_%M_%p")
    safe_prompt = "".join(x for x in prompt if x.isalnum())[:30]
    return f"{safe_date_time}_{safe_prompt}.txt"
def create_file(filename, prompt, response):
    with open(filename, 'w') as file:
        file.write(f"<h1>Prompt:</h1> <p>{prompt}</p> <h1>Response:</h1> <p>{response}</p>")
def get_table_download_link_old(file_path):
    with open(file_path, 'r') as file:
        data = file.read()
    b64 = base64.b64encode(data.encode()).decode()
    href = f'<a href="data:file/htm;base64,{b64}" target="_blank" download="{os.path.basename(file_path)}">{os.path.basename(file_path)}</a>'
    return href
def get_table_download_link(file_path):
    """Return an HTML download link for the file, choosing the MIME type from its extension."""
    with open(file_path, 'r') as file:
        data = file.read()
    b64 = base64.b64encode(data.encode()).decode()
    file_name = os.path.basename(file_path)
    ext = os.path.splitext(file_name)[1]  # get the file extension
    if ext == '.txt':
        mime_type = 'text/plain'
    elif ext == '.htm':
        mime_type = 'text/html'
    elif ext == '.md':
        mime_type = 'text/markdown'
    else:
        mime_type = 'application/octet-stream'  # generic binary data type
    href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
    return href
def CompressXML(xml_text):
    """Parse XML, strip comment-like elements, and truncate to keep the document under the token max."""
    root = ET.fromstring(xml_text)
    # ElementTree elements have no parent pointer, so walk each parent
    # and remove any child whose tag contains 'Comment'.
    for parent in root.iter():
        for child in list(parent):
            if isinstance(child.tag, str) and 'Comment' in child.tag:
                parent.remove(child)
    #return ET.tostring(root, encoding='unicode', method="xml")
    return ET.tostring(root, encoding='unicode', method="xml")[:8000]  # hack - keep top N characters to stay under the token max
def read_file_content(file):
    """Extract text from an uploaded JSON, HTML, XML, or plain-text file."""
    if file.type == "application/json":
        content = json.load(file)
        return str(content)
    elif file.type == "text/html":
        content = BeautifulSoup(file, "html.parser")
        return content.text
    elif file.type == "application/xml" or file.type == "text/xml":
        tree = ET.parse(file)
        root = tree.getroot()
        #return ET.tostring(root, encoding='unicode')
        return CompressXML(ET.tostring(root, encoding='unicode'))
    elif file.type == "text/plain":
        return file.getvalue().decode()
    else:
        return ""
def main():
    st.title("Chat with AI")
    prompts = ['']
    file_content = ""
    user_prompt = st.text_area("Your question:", '', height=120)
    uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "htm", "txt"])
    if user_prompt:
        prompts.append(user_prompt)
    if uploaded_file is not None:
        file_content = read_file_content(uploaded_file)
        prompts.append(file_content)
    if st.button('💬 Chat'):
        st.write('Chatting with the model...')
        response = chat_with_model(prompts)
        st.write('Response:')
        st.write(response)
        filename = generate_filename(user_prompt)
        create_file(filename, user_prompt, response)
        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
        if len(file_content) > 0:
            st.markdown(f"**Content Added to Prompt:**\n{file_content}")
    # List previously saved responses in the sidebar with download and delete controls.
    saved_files = glob.glob("*.txt")
    for file in saved_files:
        st.sidebar.markdown(get_table_download_link(file), unsafe_allow_html=True)
        if st.sidebar.button(f"🗑 Delete {file}"):
            #if st.sidebar.button("🗑 Delete"):
            os.remove(file)
            st.experimental_rerun()

if __name__ == "__main__":
    main()
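# To run locally (with OPENAI_KEY set in the environment):
#   streamlit run app.py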