Update app.py
app.py
CHANGED
@@ -19,21 +19,23 @@ import json, openai, os, time

from datetime import date
from openai import OpenAI
+from tavily import TavilyClient
from typing import List
from utils import function_to_schema, show_json

+openai_client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
+tavily_client = TavilyClient(api_key=os.environ.get("TAVILY_API_KEY"))

assistant_id = "asst_YWSKwQEiYVGApJdxxrTH0Cy8"

assistant, thread = None, None

def today_tool() -> str:
    """Returns today's date. Use this function for any questions related to knowing today's date.
    There should be no input. This function always returns today's date."""
    return str(date.today())

def yf_download_tool(tickers: List[str], start_date: date, end_date: date) -> pd.DataFrame:
    """Returns historical stock data for a list of given tickers from start date to end date
    using the yfinance library download function.
    Use this function for any questions related to getting historical stock data.
@@ -41,13 +43,21 @@ def yf_download_tool(tickers: List[str], start_date: date, end_date: date) -> pd
    This function always returns a pandas DataFrame."""
    return yf.download(tickers, start=start_date, end=end_date)

+def tavily_search_tool(query: str) -> str:
+    """Searches the web for a given query and returns an answer,
+    ready for use as context in a RAG application, using the Tavily API.
+    Use this function for any questions requiring knowledge not available to the model.
+    The input should be the query string. This function always returns an answer string."""
+    return tavily_client.get_search_context(query=query, max_results=5)
+
tools = {
    "today_tool": today_tool,
    "yf_download_tool": yf_download_tool,
+    "tavily_search_tool": tavily_search_tool,
}

-def create_assistant(client):
-    assistant = client.beta.assistants.create(
+def create_assistant(openai_client):
+    assistant = openai_client.beta.assistants.create(
        name="Python Code Generator",
        instructions=(
            "You are a Python programming language expert that "
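The new tavily_search_tool is a thin wrapper around TavilyClient.get_search_context, which runs a web search and returns a single context string sized for use in a prompt. A standalone sketch of that same call outside the Space (the query string is only an illustration, and TAVILY_API_KEY is assumed to be set in the environment):

# Standalone sketch, not part of app.py: exercising the same Tavily call the new tool uses.
import os
from tavily import TavilyClient

tavily_client = TavilyClient(api_key=os.environ.get("TAVILY_API_KEY"))

# get_search_context returns one string of source snippets, ready to drop into a prompt as RAG context.
context = tavily_client.get_search_context(query="current NVIDIA CEO", max_results=5)
print(context[:500])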
@@ -59,6 +69,7 @@ def create_assistant(client):
            {"type": "code_interpreter"},
            {"type": "function", "function": function_to_schema(today_tool)},
            {"type": "function", "function": function_to_schema(yf_download_tool)},
+            {"type": "function", "function": function_to_schema(tavily_search_tool)},
        ],
    )
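function_to_schema and show_json come from the Space's local utils module, which this diff does not touch. A minimal sketch of what a helper like function_to_schema typically does (an assumption based on the common Assistants API pattern, not the Space's actual implementation): it turns a Python function's signature and docstring into the JSON schema placed under the "function" key above.

# Assumed sketch of utils.function_to_schema; the real helper is not shown in this diff.
import inspect

def function_to_schema(func) -> dict:
    # Map Python annotations to JSON-schema types; anything unknown falls back to "string".
    type_map = {str: "string", int: "integer", float: "number", bool: "boolean"}
    properties = {
        name: {"type": type_map.get(param.annotation, "string")}
        for name, param in inspect.signature(func).parameters.items()
    }
    return {
        "name": func.__name__,
        "description": (func.__doc__ or "").strip(),
        "parameters": {
            "type": "object",
            "properties": properties,
            "required": list(properties),
        },
    }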
@@ -66,22 +77,22 @@ def create_assistant(client):

    return assistant

-def load_assistant(client):
-    assistant = client.beta.assistants.retrieve(assistant_id)
+def load_assistant(openai_client):
+    assistant = openai_client.beta.assistants.retrieve(assistant_id)

    show_json("assistant", assistant)

    return assistant

-def create_thread(client):
-    thread = client.beta.threads.create()
+def create_thread(openai_client):
+    thread = openai_client.beta.threads.create()

    show_json("thread", thread)

    return thread

-def create_message(client, thread, msg):
-    message = client.beta.threads.messages.create(
+def create_message(openai_client, thread, msg):
+    message = openai_client.beta.threads.messages.create(
        role="user",
        thread_id=thread.id,
        content=msg,
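show_json, the other utils import, is only used to log the objects the API returns. A plausible sketch, again an assumption rather than the Space's code:

# Assumed sketch of utils.show_json: pretty-print an SDK object for debugging.
import json

def show_json(label, obj):
    # OpenAI SDK objects are pydantic models, so model_dump_json() yields their JSON form.
    print(label, json.dumps(json.loads(obj.model_dump_json()), indent=2))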
@@ -91,8 +102,8 @@ def create_message(client, thread, msg):

    return message

-def create_run(client, assistant, thread):
-    run = client.beta.threads.runs.create(
+def create_run(openai_client, assistant, thread):
+    run = openai_client.beta.threads.runs.create(
        assistant_id=assistant.id,
        thread_id=thread.id,
        parallel_tool_calls=False,
@@ -102,14 +113,12 @@ def create_run(client, assistant, thread):

    return run

-def wait_on_run(client, thread, run):
+def wait_on_run(openai_client, thread, run):
    while run.status == "queued" or run.status == "in_progress":
-        run = client.beta.threads.runs.retrieve(
+        run = openai_client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
-
-        show_json("run", run)

        time.sleep(1)
@@ -120,8 +129,8 @@ def wait_on_run(client, thread, run):

    return run

-def get_run_steps(client, thread, run):
-    run_steps = client.beta.threads.runs.steps.list(
+def get_run_steps(openai_client, thread, run):
+    run_steps = openai_client.beta.threads.runs.steps.list(
        thread_id=thread.id,
        run_id=run.id,
        order="asc",
@@ -161,8 +170,8 @@ def execute_tool_calls(run_steps):

    return tool_call_ids, tool_call_results

-def get_messages(client, thread):
-    messages = client.beta.threads.messages.list(
+def get_messages(openai_client, thread):
+    messages = openai_client.beta.threads.messages.list(
        thread_id=thread.id
    )

@@ -224,19 +233,19 @@ def chat(message, history):
    global assistant, thread

    if assistant == None:
-        assistant = load_assistant(client)
+        assistant = create_assistant(openai_client) # on first run, create assistant and update assistant_id
+        # see https://platform.openai.com/playground/assistants
+        #assistant = load_assistant(openai_client) # on subsequent runs, load assistant

    if thread == None or len(history) == 0:
-        thread = create_thread(client)
+        thread = create_thread(openai_client)

-    create_message(client, thread, message)
+    create_message(openai_client, thread, message)

-    run = create_run(client, assistant, thread)
+    run = create_run(openai_client, assistant, thread)

-    run = wait_on_run(client, thread, run)
-    run_steps = get_run_steps(client, thread, run)
+    run = wait_on_run(openai_client, thread, run)
+    run_steps = get_run_steps(openai_client, thread, run)

    ### TODO
    tool_call_ids, tool_call_results = execute_tool_calls(run_steps)
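execute_tool_calls is defined elsewhere in app.py and is unchanged by this diff, so only its call sites are visible here. Once the assistant decides to call one of the registered functions, the run leaves wait_on_run (its status is no longer queued or in_progress), and the helper reads the pending tool calls out of the run steps and dispatches them through the tools dict. A rough sketch of what such a helper might look like (an assumption, not the Space's actual code):

# Assumed sketch of execute_tool_calls; the real implementation is elsewhere in app.py.
import json

def execute_tool_calls(run_steps):
    tool_call_ids, tool_call_results = [], []
    for step in run_steps.data:
        if step.step_details.type != "tool_calls":
            continue
        for tool_call in step.step_details.tool_calls:
            if tool_call.type != "function":
                continue
            # Arguments arrive as a JSON string; decode and call the matching entry
            # in the module-level tools dict defined above.
            args = json.loads(tool_call.function.arguments or "{}")
            tool_call_ids.append(tool_call.id)
            tool_call_results.append(tools[tool_call.function.name](**args))
    return tool_call_ids, tool_call_results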
@@ -264,14 +273,14 @@ def chat(message, history):
            "output": tool_call_results[0]
        }

-        run = client.beta.threads.runs.submit_tool_outputs(
+        run = openai_client.beta.threads.runs.submit_tool_outputs(
            thread_id=thread.id,
            run_id=run.id,
            tool_outputs=[tool_output]
        )

-        run = wait_on_run(client, thread, run)
-        run_steps = get_run_steps(client, thread, run)
+        run = wait_on_run(openai_client, thread, run)
+        run_steps = get_run_steps(openai_client, thread, run)
    ###
    tool_call_ids, tool_call_results = execute_tool_calls(run_steps)
@@ -298,17 +307,17 @@ def chat(message, history):
            "output": tool_call_results[1]
        }

-        run = client.beta.threads.runs.submit_tool_outputs(
+        run = openai_client.beta.threads.runs.submit_tool_outputs(
            thread_id=thread.id,
            run_id=run.id,
            tool_outputs=[tool_output]
        )

-        run = wait_on_run(client, thread, run)
-        run_steps = get_run_steps(client, thread, run)
+        run = wait_on_run(openai_client, thread, run)
+        run_steps = get_run_steps(openai_client, thread, run)
    ###

-    messages = get_messages(client, thread)
+    messages = get_messages(openai_client, thread)

    text_values, image_values = extract_content_values(messages)
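extract_content_values is another helper defined elsewhere in app.py and untouched by this diff; it splits the thread's messages into assistant text and generated image file IDs. A plausible sketch under that assumption:

# Assumed sketch of extract_content_values; not part of this diff.
def extract_content_values(messages):
    text_values, image_values = [], []
    for message in messages.data:
        for content in message.content:
            if content.type == "text":
                text_values.append(content.text.value)
            elif content.type == "image_file":
                image_values.append(content.image_file.file_id)
    return text_values, image_values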
@@ -328,8 +337,9 @@ gr.ChatInterface(
    description=(
        "The assistant can **generate, explain, fix, optimize, document, and test code**. "
        "It can also **execute code**. "
+        "It has access to <b>today tool</b> (get current date), to "
+        "**yfinance download tool** (get stock data), and to "
+        "**tavily search tool** (search web)."
    ),
    clear_btn="Clear",
    retry_btn=None,
@@ -340,7 +350,8 @@ gr.ChatInterface(
        ["Fix: x = [5, 2, 1, 3, 4]; print(x.sort())"],
        ["Optimize: x = []; for i in range(0, 10000): x.append(i)"],
        ["Execute: First 25 Fibonacci numbers"],
-        ["Execute with tools: Create a plot showing stock gain QTD for NVDA and AMD, x-axis is \"Day\" and y-axis is \"Gain %\""]
+        ["Execute with tools: Create a plot showing stock gain QTD for NVDA and AMD, x-axis is \"Day\" and y-axis is \"Gain %\""],
+        ["Execute with tools: TODO"]
    ],
    cache_examples=False,
).launch()