Merge pull request #3 from TuanaCelik/requesting-openai-key
- app.py +42 -36
- utils/config.py +1 -2
- utils/haystack.py +9 -9
- utils/ui.py +27 -1
app.py
CHANGED

```diff
@@ -3,11 +3,13 @@ from annotated_text import annotation
 from json import JSONDecodeError
 import logging
 from markdown import markdown
+import requests
 
 import streamlit as st
 
-from utils.haystack import query
+from utils.haystack import query, start_haystack
 from utils.ui import reset_results, set_initial_state, sidebar
+from utils.config import TWITTER_BEARER
 
 set_initial_state()
 
@@ -15,38 +17,42 @@ sidebar()
 
 st.write("# 🐤 What have they been tweeting about lately?")
 
+if st.session_state.get("OPENAI_API_KEY"):
+    prompter, template = start_haystack(st.session_state.get("OPENAI_API_KEY"))
+    st.session_state["api_key_configured"] = True
+    search_bar, button = st.columns(2)
+    # Search bar
+    with search_bar:
+        username = st.text_input("Please provide a twitter username", on_change=reset_results)
+
+    with button:
+        st.write("")
+        st.write("")
+        run_pressed = st.button("Search tweets")
+else:
+    st.write("Please provide your OpenAI Key to start using the application")
+
+if st.session_state.get("api_key_configured"):
+    run_query = (
+        run_pressed or username != st.session_state.username
+    )
+
+    # Get results for query
+    if run_query and username:
+        reset_results()
+        st.session_state.username = username
+        with st.spinner("π"):
+            try:
+                st.session_state.result = query(username, prompter, template)
+            except JSONDecodeError as je:
+                st.error(
+                    "👓 An error occurred reading the results. Is the document store working?"
+                )
+            except Exception as e:
+                logging.exception(e)
+                st.error("🐞 An error occurred during the request.")
+
+    if st.session_state.result:
+        voice = st.session_state.result
+        st.write(voice[0])
+
```
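Condensed, the control flow this hunk introduces looks like the sketch below. This is a simplification for reading the diff, not the app's exact code: the `start_haystack` / `query` signatures are the ones added in this PR, while the single-column layout and button handling here are illustrative.

```python
import streamlit as st

from utils.haystack import query, start_haystack
from utils.ui import reset_results, set_initial_state, sidebar

set_initial_state()
sidebar()  # the sidebar text input stores the key in st.session_state["OPENAI_API_KEY"]

if st.session_state.get("OPENAI_API_KEY"):
    # Build the (cached) PromptNode and template only once a key is available.
    prompter, template = start_haystack(st.session_state["OPENAI_API_KEY"])
    username = st.text_input("Please provide a twitter username", on_change=reset_results)
    if st.button("Search tweets") and username:
        st.session_state.result = query(username, prompter, template)
        st.write(st.session_state.result[0])
else:
    st.write("Please provide your OpenAI Key to start using the application")
```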
utils/config.py
CHANGED

```diff
@@ -2,5 +2,4 @@ import os
 from dotenv import load_dotenv
 
 load_dotenv()
-TWITTER_BEARER = os.getenv('TWITTER_BEARER_TOKEN')
-OPEN_AI_KEY = os.getenv('OPEN_AI_KEY')
+TWITTER_BEARER = os.getenv('TWITTER_BEARER_TOKEN')
```
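With `OPEN_AI_KEY` gone from the config module, the Twitter bearer token is the only secret still read from the environment. A minimal sketch of what a local run now requires; the variable name comes from the diff, the fail-fast check is illustrative and not part of the repo:

```python
# Illustrative check, not part of the repo: verify the one remaining
# environment secret before starting the app locally.
import os
from dotenv import load_dotenv

load_dotenv()  # reads TWITTER_BEARER_TOKEN from a local .env file, if present
if not os.getenv("TWITTER_BEARER_TOKEN"):
    raise RuntimeError("TWITTER_BEARER_TOKEN is not set; add it to your environment or .env file")
```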
utils/haystack.py
CHANGED

```diff
@@ -1,16 +1,15 @@
 import streamlit as st
 import requests
-from utils.config import TWITTER_BEARER
+from utils.config import TWITTER_BEARER
 
 from haystack.nodes import PromptNode, PromptTemplate
 
 # cached to make index and models load only at start
-@st.cache(
-)
-def start_haystack():
+@st.cache(hash_funcs={"builtins.CoreBPE": lambda _: None}, show_spinner=False, allow_output_mutation=True)
+def start_haystack(openai_key):
     #Use this function to contruct a pipeline
-    prompt_node = PromptNode(model_name_or_path="text-davinci-003", api_key=
+    prompt_node = PromptNode(model_name_or_path="text-davinci-003", api_key=openai_key)
+
 
     twitter_template = PromptTemplate(name="twitter-voice", prompt_text="""You will be given a twitter stream belonging to a specific profile. Answer with a summary of what they've lately been tweeting about and in what languages.
 You may go into some detail about what topics they tend to like tweeting about. Please also mention their overall tone, for example: positive,
@@ -37,12 +36,13 @@ def start_haystack():
 
     Summary:
     """)
+
+    st.session_state["haystack_started"] = True
     return prompt_node, twitter_template
 
-prompter, template = start_haystack()
 
-@st.cache(show_spinner=False, allow_output_mutation=True)
-def query(username):
+@st.cache(hash_funcs={"builtins.CoreBPE": lambda _: None}, show_spinner=False, allow_output_mutation=True)
+def query(username, prompter, template):
 
     bearer_token = TWITTER_BEARER
 
```
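A note on the new `hash_funcs={"builtins.CoreBPE": lambda _: None}` argument: `st.cache` (Streamlit's legacy caching decorator) hashes a cached function's inputs and referenced objects, and the OpenAI tokenizer that `PromptNode` pulls in (tiktoken's `CoreBPE`) is presumably the object it cannot hash, so that type name is mapped to a constant and excluded from the cache key. A generic sketch of the pattern, with a made-up resource standing in for the PromptNode:

```python
import streamlit as st

# Pattern used above: when a cached function touches an object Streamlit
# cannot hash, map that type name to a constant so it is ignored when
# building the cache key. (st.cache is the legacy caching API.)
@st.cache(hash_funcs={"builtins.CoreBPE": lambda _: None},
          show_spinner=False, allow_output_mutation=True)
def load_resource(api_key: str):
    # Illustrative stand-in for constructing a PromptNode and template.
    return {"api_key": api_key, "client": object()}
```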
utils/ui.py
CHANGED

```diff
@@ -8,10 +8,14 @@ def set_state_if_absent(key, value):
 def set_initial_state():
     set_state_if_absent("username", "Provide a Twitter username")
     set_state_if_absent("result", None)
+    set_state_if_absent("haystack_started", False)
 
 def reset_results(*args):
     st.session_state.result = None
 
+def set_openai_api_key(api_key: str):
+    st.session_state["OPENAI_API_KEY"] = api_key
+
 def sidebar():
     with st.sidebar:
         image = Image.open('logo/haystack-logo-colored.png')
@@ -21,6 +25,25 @@ def sidebar():
             "**Take results with a grain of** 🧂\n\n"
             "For more on how this was built, instructions to run locally and to contribute: [visit GitHub](https://github.com/TuanaCelik/should-i-follow#readme)")
 
+        st.markdown(
+            "## How to use\n"
+            "1. Enter your [OpenAI API key](https://platform.openai.com/account/api-keys) below\n"
+            "2. Enter a Twitter username in the searchbar\n"
+            "3. Enjoy 🐤\n"
+        )
+
+        api_key_input = st.text_input(
+            "OpenAI API Key",
+            type="password",
+            placeholder="Paste your OpenAI API key here (sk-...)",
+            help="You can get your API key from https://platform.openai.com/account/api-keys.",
+            value=st.session_state.get("OPENAI_API_KEY", ""),
+        )
+
+        if api_key_input:
+            set_openai_api_key(api_key_input)
+
+        st.markdown("---")
         st.markdown(
             "## How this works\n"
             "This app was built with [Haystack](https://haystack.deepset.ai) using the"
@@ -30,4 +53,7 @@ def sidebar():
             "You can see how the `PromptNode` was set up [here](https://github.com/TuanaCelik/should-i-follow/blob/main/utils/haystack.py)")
         st.markdown("---")
         st.markdown("Made by [tuanacelik](https://twitter.com/tuanacelik)")
-        st.
+        st.markdown("---")
+        st.markdown("""Thanks to [mmz_001](https://twitter.com/mm_sasmitha)
+        for open sourcing [KnowledgeGPT](https://knowledgegpt.streamlit.app/) which helped me with this sidebar 🙏🏽""")
+        st.image(image, width=250)
```