Tuana committed on
Commit e283888 • 1 Parent(s): 391c720

adding sidebar for open ai key

Files changed (3)
  1. app.py +46 -37
  2. utils/haystack.py +8 -9
  3. utils/ui.py +35 -0
app.py CHANGED
@@ -3,48 +3,57 @@ from annotated_text import annotation
 from json import JSONDecodeError
 import logging
 from markdown import markdown
+import requests
 
 import streamlit as st
 
-from utils.haystack import query
-from utils.ui import reset_results, set_initial_state
+from utils.haystack import start_haystack, query
+from utils.ui import reset_results, set_initial_state, sidebar
+from utils.config import TWITTER_BEARER
 
 set_initial_state()
 
 st.write("# 🐤 What have they been tweeting about lately?")
 
-search_bar, button = st.columns(2)
-# Search bar
-with search_bar:
-    username = st.text_input("Please provide a twitter username", on_change=reset_results)
-
-with button:
-    st.write("")
-    st.write("")
-    run_pressed = st.button("Search tweets")
-
-run_query = (
-    run_pressed or username != st.session_state.username
-)
-
-# Get results for query
-if run_query and username:
-    reset_results()
-    st.session_state.username = username
-    with st.spinner("🔎"):
-        try:
-            st.session_state.result = query(username)
-        except JSONDecodeError as je:
-            st.error(
-                "👓    An error occurred reading the results. Is the document store working?"
-            )
-        except Exception as e:
-            logging.exception(e)
-            st.error("🐞    An error occurred during the request.")
-
-
-
-if st.session_state.result:
-    voice = st.session_state.result
-    st.write(voice[0])
-
+sidebar()
+
+
+if st.session_state.get("OPENAI_API_KEY"):
+    prompter, template = start_haystack(st.session_state.get("OPENAI_API_KEY"))
+    st.session_state["api_key_configured"] = True
+    search_bar, button = st.columns(2)
+    # Search bar
+    with search_bar:
+        username = st.text_input("Please provide a twitter username", on_change=reset_results)
+
+    with button:
+        st.write("")
+        st.write("")
+        run_pressed = st.button("Search tweets")
+else:
+    st.write("Please provide your OpenAI Key to start using the application")
+
+if st.session_state.get("api_key_configured"):
+    run_query = (
+        run_pressed or username != st.session_state.username
+    )
+
+    # Get results for query
+    if run_query and username:
+        reset_results()
+        st.session_state.username = username
+        with st.spinner("🔎"):
+            try:
+                st.session_state.result = query(username, prompter, template)
+            except JSONDecodeError as je:
+                st.error(
+                    "👓    An error occurred reading the results. Is the document store working?"
+                )
+            except Exception as e:
+                logging.exception(e)
+                st.error("🐞    An error occurred during the request.")
+
+    if st.session_state.result:
+        voice = st.session_state.result
+        st.write(voice[0])
+
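
Note: the new app.py imports TWITTER_BEARER from utils.config, a module that is not part of this diff. A minimal sketch of what such a config module could look like, assuming the bearer token is read from an environment variable (the variable name TWITTER_BEARER_TOKEN is an assumption, not something this commit shows):

# utils/config.py -- hypothetical sketch, not included in this commit
import os

# Assumption: the Twitter bearer token is supplied via an environment variable
# (or a Streamlit secret) rather than hard-coded in the repository.
TWITTER_BEARER = os.environ.get("TWITTER_BEARER_TOKEN", "")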
utils/haystack.py CHANGED
@@ -1,16 +1,14 @@
 import streamlit as st
 import requests
-from utils.config import TWITTER_BEARER, OEPN_AI_KEY
+from utils.config import TWITTER_BEARER
 
 from haystack.nodes import PromptNode, PromptTemplate
 
 # cached to make index and models load only at start
-@st.cache(
-    hash_funcs={"builtins.SwigPyObject": lambda _: None}, allow_output_mutation=True
-)
-def start_haystack():
+@st.cache(hash_funcs={"builtins.CoreBPE": lambda _: None}, show_spinner=False, allow_output_mutation=True)
+def start_haystack(openai_key):
     #Use this function to contruct a pipeline
-    prompt_node = PromptNode(model_name_or_path="text-davinci-003", api_key=OEPN_AI_KEY)
+    prompt_node = PromptNode(model_name_or_path="text-davinci-003", api_key=openai_key)
 
     twitter_template = PromptTemplate(name="twitter-voice", prompt_text="""You will be given a twitter stream belonging to a specific profile. Answer with a summary of what they've lately been tweeting about and in what languages.
     You may go into some detail about what topics they tend to like tweeting about. Please also mention their overall tone, for example: positive,
@@ -37,12 +35,13 @@ def start_haystack():
 
     Summary:
     """)
+
+    st.session_state["haystack_started"] = True
     return prompt_node, twitter_template
 
-prompter, template = start_haystack()
 
-@st.cache(show_spinner=False, allow_output_mutation=True)
-def query(username):
+@st.cache(hash_funcs={"builtins.CoreBPE": lambda _: None}, show_spinner=False, allow_output_mutation=True)
+def query(username, prompter, template):
 
     bearer_token = TWITTER_BEARER
 
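The hunks above stop right after bearer_token = TWITTER_BEARER, so most of the body of query() is not shown. A plausible sketch of the tweet-fetching step that presumably follows, using the Twitter API v2 with the bearer token (the helper name fetch_recent_tweets, the max_results value, and the exact fields are assumptions, not part of this commit):

# Hypothetical sketch of how query() might pull a user's recent tweets.
import requests

def fetch_recent_tweets(username: str, bearer_token: str, max_results: int = 50):
    headers = {"Authorization": f"Bearer {bearer_token}"}
    # Resolve the handle to a numeric user id (Twitter API v2).
    user = requests.get(
        f"https://api.twitter.com/2/users/by/username/{username}", headers=headers
    ).json()
    user_id = user["data"]["id"]
    # Fetch the user's most recent tweets.
    tweets = requests.get(
        f"https://api.twitter.com/2/users/{user_id}/tweets",
        headers=headers,
        params={"max_results": max_results},
    ).json()
    return [tweet["text"] for tweet in tweets.get("data", [])]

Presumably the collected tweet text is then passed to the returned PromptNode together with the twitter-voice template, but that part of the function falls outside the hunks shown here.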
utils/ui.py CHANGED
@@ -7,6 +7,41 @@ def set_state_if_absent(key, value):
 def set_initial_state():
     set_state_if_absent("username", "Provide a Twitter username")
     set_state_if_absent("result", None)
+    set_state_if_absent("haystack_started", False)
 
 def reset_results(*args):
     st.session_state.result = None
+
+def set_openai_api_key(api_key: str):
+    st.session_state["OPENAI_API_KEY"] = api_key
+
+def sidebar():
+    with st.sidebar:
+        st.markdown(
+            "## How to use\n"
+            "1. Enter your [OpenAI API key](https://platform.openai.com/account/api-keys) below🔑\n"
+            "2. Enter a twitter username in the searchbar\n"
+            "3. Enjoy 🤗\n"
+        )
+        api_key_input = st.text_input(
+            "OpenAI API Key",
+            type="password",
+            placeholder="Paste your OpenAI API key here (sk-...)",
+            help="You can get your API key from https://platform.openai.com/account/api-keys.",
+            value=st.session_state.get("OPENAI_API_KEY", ""),
+        )
+
+        if api_key_input:
+            set_openai_api_key(api_key_input)
+
+        st.markdown("---")
+        st.markdown("### About\n"
+        "This app is just for fun and there are many points of improvement."
+        "It was built with [Haystack](https://haystack.deepset.ai) using the"
+        " [PromptNode](https://docs.haystack.deepset.ai/docs/prompt_node) and custom [PromptTemplate](https://docs.haystack.deepset.ai/docs/prompt_node#templates)"
+        "The source code is also on [GitHub](https://github.com/TuanaCelik/should-i-follow)"
+        "with instructions to run locally.")
+        st.markdown("Made by [tuanacelik](https://twitter.com/tuanacelik)")
+        st.markdown("---")
+        st.markdown("""Thanks to [mmz_001](https://twitter.com/mm_sasmitha)
+            for open sourcing [KnowledgeGPT](https://knowledgegpt.streamlit.app/) which helped me with this sidebar 🙏🏽""")
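
The hunk header references set_state_if_absent(key, value), which is defined above line 7 of utils/ui.py and therefore not visible in this diff. Judging by how it is called, it is presumably a small guard along the lines of the sketch below (an assumption, not code from this commit):

# Hypothetical sketch of the set_state_if_absent helper referenced in the hunk header.
import streamlit as st

def set_state_if_absent(key, value):
    # Seed session state only once, so Streamlit reruns don't overwrite user input.
    if key not in st.session_state:
        st.session_state[key] = value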