hanoch.rahimi@gmail committed
Commit d54eee9 · 1 Parent(s): a30e3b1

assistant wip

Files changed (3)
  1. app.py +26 -3
  2. openai_utils.py +97 -13
  3. utils.py +3 -1
app.py CHANGED
@@ -19,7 +19,6 @@ import openai_utils as oai
 PINECONE_KEY = st.secrets["PINECONE_API_KEY"] # app.pinecone.io
 OPENAI_API_KEY = st.secrets["OPENAI_API_KEY"] # app.pinecone.io
 PINE_CONE_ENVIRONMENT = st.secrets["PINE_CONE_ENVIRONMENT"] # app.pinecone.io
-OPENAI_ORGANIZATION_ID = st.secrets["OPENAI_ORGANIZATION_ID"]
 model_name = 'text-embedding-ada-002'
 
 embed = OpenAIEmbeddings(
@@ -51,7 +50,7 @@ def init_models():
     return retriever, tokenizer#, vectorstore
 
 
-st.session_state.openai_client = openai.OpenAI(api_key = OPENAI_API_KEY,organization=OPENAI_ORGANIZATION_ID)
+st.session_state.openai_client = oai.get_client()
 retriever, tokenizer = init_models()
 #st.session_state.messages = [{"role":"system", "content":"You are an assistant who helps users find startups to invest in."}]
 
@@ -191,6 +190,26 @@ def run_query(query, report_type, top_k , regions, countries, is_debug, index_na
     elif report_type=="company_list": # or st.session_state.new_conversation:
         results = search_index(query, top_k, regions, countries)
         descriptions = "\n".join([f"Description of company \"{res['name']}\": {res['data']['Summary']}.\n" for res in results[:20] if 'Summary' in res['data']])
+    elif report_type=="assistant":
+        results = search_index(query, top_k, regions, countries)
+        descriptions = "\n".join([f"Description of company \"{res['name']}\": {res['data']['Summary']}.\n" for res in results[:20] if 'Summary' in res['data']])
+        ntokens = len(descriptions.split(" "))
+        # prompt = utils.clustering_prompt if report_type=="clustered" else utils.default_prompt
+        # prompt_txt = prompt + """
+        # User query: {query}
+        # Company descriptions: {descriptions}
+        # """
+        # prompt_template = PromptTemplate(template=prompt_txt, input_variables=["descriptions", "query"])
+        # prompt = prompt_template.format(descriptions = descriptions, query = query)
+        #print(f"==============================\nPrompt:\n{prompt[:1000]}\n==============================\n")
+        prompt = query
+        m_text = oai.call_openai(prompt, engine=openai_model, temp=0, top_p=1.0)
+        m_text
+        st.session_state.messages.append({"role": "user", "content": query})
+        i = m_text.find("-----")
+        i = 0 if i<0 else i
+        st.session_state.messages.append({"role": "system", "content": m_text[:i]})
+
     else:
         st.session_state.new_conversation = False
 
@@ -355,7 +374,8 @@ if utils.check_password():
     with tab_advanced:
         #prompt_title = st.selectbox("Report Type", index = 0, options = utils.get_prompts(), on_change=on_prompt_selected, key="advanced_prompts_select", )
         #prompt_title_editable = st.text_input("Title", key="prompt_title_editable")
-        report_type = st.selectbox(label="Response Type", options=["standard", "guided", "company_list", "clustered"], index=0)
+        report_type = st.selectbox(label="Response Type", options=["assistant", "standard", "guided", "company_list", "clustered"], index=0)
+        assistant_id = st.text_input(label="Assistant ID", key="assistant_id", value="asst_fkZtxo127nxKOCcwrwznuCs2")
         default_prompt = st.text_area("Default Prompt", value = utils.default_prompt, height=400, key="advanced_default_prompt_content")
         clustering_prompt = st.text_area("Clustering Prompt", value = utils.clustering_prompt, height=400, key="advanced_clustering_prompt_content")
         #prompt_new = st.button("New", on_click = _prompt(prompt_title, prompt))
@@ -387,5 +407,8 @@ if utils.check_password():
     # prompt = "guided"
     # else:
     # prompt = ""
+    oai.start_conversation()
+    #st.session_state.assistant_id = assistant_id
+    st.session_state.report_type = report_type
     run_query(query, report_type, top_k, region_selectbox, countries_selectbox, is_debug, index_namespace, openai_model)
 
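For reference, the new "assistant" branch in run_query only works if several st.session_state keys are populated beforehand. Below is a minimal wiring sketch, not part of the commit: the names mirror the diff, the assistant ID is a placeholder, and oai.start_conversation() is assumed to create the assistant thread (its body is not shown here).

```python
# Hypothetical wiring sketch; names mirror the diff, values are placeholders.
import streamlit as st
import openai_utils as oai

if "openai_client" not in st.session_state:
    st.session_state.openai_client = oai.get_client()  # new helper replacing the inline openai.OpenAI(...) call
if "messages" not in st.session_state:
    st.session_state.messages = []                      # history the assistant branch appends to

st.session_state.report_type = "assistant"              # read inside oai.call_openai to choose the assistant path
st.session_state.assistant_id = "asst_PLACEHOLDER"      # normally comes from the new "Assistant ID" text input
oai.start_conversation()                                 # assumed to set st.session_state.assistant_thread

reply = oai.call_openai("Find robotics startups in Europe", engine="gpt-4", temp=0, top_p=1.0)
```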
 
openai_utils.py CHANGED
@@ -1,24 +1,108 @@
 import time
+import openai
+import requests
 import streamlit as st
 
 
+
+OPENAI_API_KEY = st.secrets["OPENAI_API_KEY"] # app.pinecone.io
+OPENAI_ORGANIZATION_ID = st.secrets["OPENAI_ORGANIZATION_ID"]
+
+
+headers = {"Content-Type": "application/json",
+           "Authorization": f"Bearer {OPENAI_API_KEY}"
+          }
+
+SEED = 42
+
+def get_client():
+    return openai.OpenAI(api_key = OPENAI_API_KEY,organization=OPENAI_ORGANIZATION_ID)
+
 def call_openai(prompt, engine="gpt-3.5-turbo", temp=0, top_p=1.0, max_tokens=4048):
-    try:
-        response = st.session_state.openai_client.chat.completions.create(
-            model=engine,
-            messages=st.session_state.messages + [{"role": "user", "content": prompt}],
-            temperature=temp,
-            max_tokens=max_tokens
-        )
-        print(f"====================\nOpen AI response\n {response}\n====================\n")
-        text = response.choices[0].message.content.strip()
-        return text
-    except Exception as e:
-        #except openai.error.OpenAIError as e:
-        print(f"An error occurred: {str(e)}")
+    if st.session_state.report_type=="assistant":
+        try:
+            thread = st.session_state.assistant_thread
+            assistant_id = st.session_state.assistant_id
+            message = st.session_state.openai_client.beta.threads.messages.create(
+                thread.id,
+                role="user",
+                content=prompt,
+            )
+            run = st.session_state.openai_client.beta.threads.runs.create(
+                thread_id=thread.id,
+                assistant_id=assistant_id,
+                instructions="Please address the user as Dan"
+            )
+            messages = []
+            while True:
+                # Retrieve the run status
+                run_status = st.session_state.openai_client.beta.threads.runs.retrieve(
+                    thread_id=thread.id,
+                    run_id=run.id
+                )
+
+                # Check and print the step details
+                run_steps = st.session_state.openai_client.beta.threads.runs.steps.list(
+                    thread_id=thread.id,
+                    run_id=run.id
+                )
+                for step in run_steps.data:
+                    if step.type == 'tool_calls':
+                        print(f"Tool {step.type} invoked.")
+
+                    # If step involves code execution, print the code
+                    if step.type == 'code_interpreter':
+                        print(f"Python Code Executed: {step.step_details['code_interpreter']['input']}")
+
+                if run_status.status == 'completed':
+                    # Retrieve all messages from the thread
+                    messages = st.session_state.openai_client.beta.threads.messages.list(
+                        thread_id=thread.id
+                    )
+
+                    # Print all messages from the thread
+                    for msg in messages.data:
+                        role = msg.role
+                        content = msg.content[0].text.value
+                        print(f"{role.capitalize()}: {content}")
+                    break # Exit the polling loop since the run is complete
+                elif run_status.status in ['queued', 'in_progress']:
+                    print(f'{run_status.status.capitalize()}... Please wait.')
+                    time.sleep(1.5) # Wait before checking again
+                else:
+                    print(f"Run status: {run_status.status}")
+                    break # Exit the polling loop if the status is neither 'in_progress' nor 'completed'
+
+
+            print(f"====================\nOpen AI response\n {messages}\n====================\n")
+            text = ""
+            for message in messages:
+                text = text + "\n" + message.content[0].text.value
+            return text
+        except Exception as e:
+            #except openai.error.OpenAIError as e:
+            print(f"An error occurred: {str(e)}")
+    else:
+        try:
+            response = st.session_state.openai_client.chat.completions.create(
+                model=engine,
+                messages=st.session_state.messages + [{"role": "user", "content": prompt}],
+                temperature=temp,
+                seed = SEED,
+                max_tokens=max_tokens
+            )
+            print(f"====================\nOpen AI response\n {response}\n====================\n")
+            text = response.choices[0].message.content.strip()
+            return text
+        except Exception as e:
+            #except openai.error.OpenAIError as e:
+            print(f"An error occurred: {str(e)}")
     return "Failed to generate a response."
 
 
+def get_assistant(assistant_id):
+    return st.session_state.openai_client.beta.assistants.retrieve(assistant_id)
+
 def send_message(role, content):
     message = st.session_state.openai_client.beta.threads.messages.create(
         thread_id=st.session_state.assistant_thread.id,
  thread_id=st.session_state.assistant_thread.id,
utils.py CHANGED
@@ -18,11 +18,13 @@ import openai
 # )
 
 ###
-
 
 def check_password():
     """Returns `True` if the user had the correct password."""
 
+    if st.secrets['SKIP_PASSWORD']=="True":
+        return True
+
     def password_entered():
         """Checks whether a password entered by the user is correct."""
         if st.session_state["password"] == st.secrets["password"]: