Spaces:
Runtime error
Update app.py
app.py CHANGED
@@ -6,7 +6,6 @@ import glob
 import json
 import mistune
 import pytz
-import textwrap

 from datetime import datetime
 from openai import ChatCompletion
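Note on the dropped import: the old main() (see the last hunk) chunked uploaded files with textwrap.wrap, which splits on whitespace and never cuts a word but also collapses whitespace runs. The rewrite slices the combined input by character count instead, so textwrap is no longer needed. A small illustrative sketch of the difference (not part of app.py):

import textwrap

text = "one two three four five"

# textwrap.wrap: whitespace-aware, words stay intact, but spacing and
# newlines are not preserved exactly.
print(textwrap.wrap(text, 10))                            # ['one two', 'three four', 'five']

# Fixed-size slicing (the new approach): every character is kept, but a
# chunk boundary can fall in the middle of a word.
print([text[i:i + 10] for i in range(0, len(text), 10)])  # ['one two th', 'ree four f', 'ive']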
@@ -64,7 +63,7 @@ def get_table_download_link(file_path):
     data = file.read()
     b64 = base64.b64encode(data.encode()).decode()
     file_name = os.path.basename(file_path)
-    ext = os.path.splitext(file_name)[1]
+    ext = os.path.splitext(file_name)[1] # get the file extension

     if ext == '.txt':
         mime_type = 'text/plain'
@@ -73,7 +72,7 @@ def get_table_download_link(file_path):
     elif ext == '.md':
         mime_type = 'text/markdown'
     else:
-        mime_type = 'application/octet-stream'
+        mime_type = 'application/octet-stream' # general binary data type

     href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
     return href
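The two added comments above document the extension-to-MIME mapping used for the data: URI download links. For comparison only, the same mapping can be sketched with the standard-library mimetypes module; make_download_link below is a hypothetical stand-in for get_table_download_link, and guess_type may not recognize '.md' on older Python versions, so the explicit elif chain above is the safer choice for Markdown files.

import base64
import mimetypes
import os

def make_download_link(file_path):
    # Read the file and base64-encode it for embedding in a data: URI.
    with open(file_path, "rb") as f:
        b64 = base64.b64encode(f.read()).decode()
    file_name = os.path.basename(file_path)
    # Fall back to the same general binary type when the extension is unknown.
    mime_type = mimetypes.guess_type(file_name)[0] or "application/octet-stream"
    return (f'<a href="data:{mime_type};base64,{b64}" target="_blank" '
            f'download="{file_name}">{file_name}</a>')

Newer Streamlit releases also offer st.download_button(label, data, file_name, mime), which avoids embedding large files as base64 HTML in the sidebar.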
@@ -107,40 +106,43 @@ def read_file_content(file,max_length):
         return ""

 def main():
-
+    prompts = ['']
+    file_content = ""
+    user_prompt = st.text_area("Your question:", '', height=100)
     uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])

+    if user_prompt:
+        prompts.append(user_prompt)
+
+    if uploaded_file is not None:
+        file_content = read_file_content(uploaded_file, max_length)
+        prompts.append(file_content)
+
     if st.button('💬 Chat'):
         st.write('Thinking and Reasoning with your inputs...')
-
+        total_input = "".join(prompts)
+
+        if len(total_input) > 4000:
+            chunks = [total_input[i:i + max_length] for i in range(0, len(total_input), max_length)]
+            responses = []
+            for chunk in chunks:
+                responses.append(chat_with_model([chunk]))
+            response = "\n".join(responses)
+        else:
+            response = chat_with_model(prompts)
+        st.write('Response:')
+        st.write(response)

-
-
-
-
-
-
-        filename = generate_filename(prompt, choice)
-        create_file(filename, prompt, response)
-        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
-
-    if uploaded_file is not None:
-        file_content = read_file_content(uploaded_file, max_length)
-        document_parts = textwrap.wrap(file_content, max_length)
-        for part in document_parts:
-            response = chat_with_model([part])
-            st.write('Response:')
-            st.write(response)
-            filename = generate_filename(part, choice)
-            create_file(filename, part, response)
-            st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
-
-        if len(file_content) > 0:
-            st.text_area("File Content:", file_content, height=400) # Display file content in a scrollable text box
+        filename = generate_filename(user_prompt, choice)
+        create_file(filename, user_prompt, response)
+        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
+
+        if len(file_content) > 0:
+            st.markdown(f"**File Content Added:**\n{file_content}")

     all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
     for file in all_files:
-        col1, col2 = st.sidebar.columns([4,1])
+        col1, col2 = st.sidebar.columns([4,1]) # adjust the ratio as needed
         with col1:
             st.markdown(get_table_download_link(file), unsafe_allow_html=True)
         with col2:
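The rewritten main() joins the typed question and any uploaded file text into total_input and, past a 4000-character threshold, splits it into max_length-sized slices that are sent to chat_with_model one at a time, with the per-chunk answers joined by newlines. A minimal sketch of that pattern in isolation (chunked_chat and the send parameter are hypothetical stand-ins; in app.py the call is chat_with_model and max_length comes from the surrounding configuration):

def chunked_chat(text, max_length, send):
    # Below the threshold, send everything as a single prompt.
    if len(text) <= 4000:
        return send([text])
    # Otherwise cut the text into fixed-size character slices ...
    chunks = [text[i:i + max_length] for i in range(0, len(text), max_length)]
    # ... and query the model once per chunk. Chunks are sent independently,
    # so the model sees no context from earlier chunks.
    return "\n".join(send([chunk]) for chunk in chunks)

Because the split is by raw character count, a boundary can fall mid-sentence and each chunk is answered without the others' context; a token-aware splitter with some overlap between chunks would avoid both issues.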