auto update text box + stream
app.py CHANGED
@@ -13,24 +13,24 @@ system_template = {"role": "system", "content": os.environ["content"]}
 
 retrieve_all = EmbeddingRetriever(
     document_store=FAISSDocumentStore.load(
-
-
-    ),
+        index_path="./documents/climate_gpt.faiss",
+        config_path="./documents/climate_gpt.json",
+    ),
     embedding_model="sentence-transformers/multi-qa-mpnet-base-dot-v1",
     model_format="sentence_transformers",
 )
 retrieve_giec = EmbeddingRetriever(
     document_store=FAISSDocumentStore.load(
-
-
-    ),
+        index_path="./documents/climate_gpt_only_giec.faiss",
+        config_path="./documents/climate_gpt_only_giec.json",
+    ),
     embedding_model="sentence-transformers/multi-qa-mpnet-base-dot-v1",
     model_format="sentence_transformers",
 )
 
 
-def
-    retriever = retrieve_all if report_type=="All available" else retrieve_giec
+def chat(query: str, history: list = [system_template], report_type="All available", threshold=0.56):
+    retriever = retrieve_all if report_type == "All available" else retrieve_giec
     docs = retriever.retrieve(query=query, top_k=10)
 
     messages = history + [{"role": "user", "content": query}]
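Note on the hunk above: both FAISSDocumentStore.load() calls now point at a .faiss index plus its .json config under ./documents, which must already exist in the Space. For reference, a minimal sketch of how such a file pair is typically produced with the Haystack 1.x API (placeholder documents, with the climate_gpt paths reused from the diff; not part of this commit):

# Hedged sketch (Haystack 1.x assumed): build and save the index/config pair
# that FAISSDocumentStore.load() reads above.
from haystack import Document
from haystack.document_stores import FAISSDocumentStore
from haystack.nodes import EmbeddingRetriever

store = FAISSDocumentStore(faiss_index_factory_str="Flat")
store.write_documents([Document(content="Excerpt from an environmental report...")])  # placeholder content

retriever = EmbeddingRetriever(
    document_store=store,
    embedding_model="sentence-transformers/multi-qa-mpnet-base-dot-v1",
    model_format="sentence_transformers",
)
store.update_embeddings(retriever)  # compute and store the document embeddings

# writes the FAISS index file and its JSON config side by side
store.save(index_path="./documents/climate_gpt.faiss", config_path="./documents/climate_gpt.json")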
@@ -46,20 +46,27 @@ def gen_conv(query: str, history: list = [system_template], report_type="All ava
         messages.append({"role": "system", "content": "no relevant document available."})
         sources = "No environmental report was used to provide this answer."
 
-
+    response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages, temperature=0.2,)["choices"][0][
         "message"
     ]["content"]
 
-
-    gradio_format = make_pairs([a["content"] for a in messages[1:]])
+    complete_response = ""
 
-
+    for chunk in response:
+        complete_response += chunk["choices"][0]["delta"].get("content", "")
+        messages[-1] = {"role": "assistant", "content": complete_response}
+        gradio_format = make_pairs([a["content"] for a in messages[1:]])
+        yield gradio_format, messages, sources
 
 
 def test(feed: str):
     print(feed)
 
 
+def reset_textbox():
+    return gr.update(value="")
+
+
 # Gradio
 css_code = ".gradio-container {background-image: url('file=background.png');background-position: top right}"
 
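Note on the hunk above: the for chunk in response loop reads chunk["choices"][0]["delta"], which is the shape the openai 0.x client yields only when ChatCompletion.create() is called with stream=True and the response object is iterated directly, whereas the call in this hunk still indexes the result down to ["choices"][0]["message"]["content"]. For reference, a hedged sketch of the streaming variant of the same loop, reusing this file's openai import and make_pairs helper (the function name and the stream=True flag are assumptions, not what the commit contains):

# Hedged sketch (openai-python 0.x assumed): same loop as above, but fed by a
# streamed ChatCompletion so that each chunk actually carries a "delta".
def chat_streamed(messages, sources):
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages,
        temperature=0.2,
        stream=True,  # the call returns an iterator of chunks instead of one completion
    )
    complete_response = ""
    for chunk in response:
        complete_response += chunk["choices"][0]["delta"].get("content", "")
        messages[-1] = {"role": "assistant", "content": complete_response}
        gradio_format = make_pairs([a["content"] for a in messages[1:]])
        yield gradio_format, messages, sources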
@@ -95,7 +102,7 @@ with gr.Blocks(title="🌍 ClimateGPT Ekimetrics", css=css_code) as demo:
         sources_textbox = gr.Textbox(interactive=False, show_label=False, max_lines=50)
 
         ask.submit(
-            fn=
+            fn=chat,
             inputs=[
                 ask,
                 state,
@@ -107,6 +114,8 @@ with gr.Blocks(title="🌍 ClimateGPT Ekimetrics", css=css_code) as demo:
             ],
             outputs=[chatbot, state, sources_textbox],
         )
+        ask.submit(reset_textbox, [], [ask])
+
         with gr.Accordion("Feedbacks", open=False):
             gr.Markdown("Please complete some feedbacks 🙏")
             feedback = gr.Textbox()
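Taken together, the two hunks above wire the chat() generator into the chatbot, the conversation state, and the sources textbox, then chain a second submit handler that clears the input box. A condensed sketch of that wiring, with the layout and the extra inputs (report type, etc.) omitted and the placeholder text assumed:

# Hedged sketch: the event wiring from the two hunks above, stripped of layout.
with gr.Blocks(title="🌍 ClimateGPT Ekimetrics", css=css_code) as demo:
    chatbot = gr.Chatbot()
    state = gr.State([system_template])
    ask = gr.Textbox(show_label=False, placeholder="Ask a question about climate change")  # placeholder assumed
    sources_textbox = gr.Textbox(interactive=False, show_label=False, max_lines=50)

    # chat is a generator, so every yield pushes an incremental update to these outputs
    ask.submit(fn=chat, inputs=[ask, state], outputs=[chatbot, state, sources_textbox])
    # a second handler on the same event clears the textbox once the question is sent
    ask.submit(reset_textbox, [], [ask])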
@@ -150,4 +159,4 @@ with gr.Blocks(title="🌍 ClimateGPT Ekimetrics", css=css_code) as demo:
     with gr.Tab("Examples"):
         gr.Markdown("See here some examples on how to use the Chatbot")
 
-demo.launch()
+demo.launch(concurrency_count=16)
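One caveat on the last hunk: in Gradio 3.x, concurrency_count is a parameter of Blocks.queue() rather than of launch(), and the queue also needs to be enabled for a generator handler such as chat() to stream its yields to the UI. A hedged sketch of that launch pattern (Gradio 3.x assumed; not what the commit contains):

# Hedged sketch (Gradio 3.x assumed): enable queuing so generator outputs stream,
# and set worker concurrency on the queue rather than on launch().
demo.queue(concurrency_count=16)
demo.launch()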