Spaces:
Running
Running
Update app.py
Browse files — Added Brave news search for context
app.py
CHANGED
@@ -23,8 +23,9 @@ from datetime import datetime, timedelta
|
|
23 |
import pytz
|
24 |
import math
|
25 |
import numpy as np
|
26 |
-
# import matplotlib.pyplot as plt
|
27 |
from pylatexenc.latex2text import LatexNodes2Text
|
|
|
|
|
28 |
|
29 |
load_dotenv(override=True)
|
30 |
key = os.getenv('OPENAI_API_KEY')
|
@@ -34,6 +35,7 @@ pwds = os.getenv('PASSWORD')
|
|
34 |
pwdList = pwds.split(',')
|
35 |
DEEPSEEK_KEY=os.getenv('DEEPSEEK_KEY')
|
36 |
GROQ_KEY=os.getenv('GROQ_KEY')
|
|
|
37 |
|
38 |
site = os.getenv('SITE')
|
39 |
if site == 'local':
|
@@ -46,6 +48,7 @@ else:
|
|
46 |
dataDir = '/data/'
|
47 |
stock_data_path = dataDir + 'Stocks.txt'
|
48 |
|
|
|
49 |
|
50 |
speak_file = dataDir + "speek.wav"
|
51 |
|
@@ -57,6 +60,8 @@ abbrevs = {'St. ' : 'Saint ', 'Mr. ': 'mister ', 'Mrs. ':'mussus ', 'Mr. ':'mist
|
|
57 |
|
58 |
special_chat_types = ['math', 'logic']
|
59 |
|
|
|
|
|
60 |
class Step(BaseModel):
|
61 |
explanation: str
|
62 |
output: str
|
@@ -65,6 +70,29 @@ class MathReasoning(BaseModel):
|
|
65 |
steps: list[Step]
|
66 |
final_answer: str
|
67 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
68 |
def Client():
|
69 |
return OpenAI(api_key = key)
|
70 |
|
@@ -585,8 +613,9 @@ def new_conversation(user):
|
|
585 |
for fpath in flist:
|
586 |
if os.path.exists(fpath):
|
587 |
os.remove(fpath)
|
588 |
-
return [None, [], gr.Markdown(value='', label='Dialog', container=True),
|
589 |
-
gr.
|
|
|
590 |
|
591 |
def updatePassword(txt):
|
592 |
password = txt.lower().strip()
|
@@ -610,7 +639,7 @@ def updatePassword(txt):
|
|
610 |
# ref = len(txt[ref:loc]) + len(frag)
|
611 |
# return txt
|
612 |
|
613 |
-
def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_image_file='', plot=None):
|
614 |
image_gen_model = 'gpt-4o-2024-08-06'
|
615 |
user_window = user_window.lower().strip()
|
616 |
isBoss = False
|
@@ -676,6 +705,7 @@ def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_ima
|
|
676 |
using_groq = False
|
677 |
reasoning = False
|
678 |
prompt = prompt.strip()
|
|
|
679 |
finish_reason = 'ok'
|
680 |
if prompt.lower().startswith('dsr1 '):
|
681 |
deepseek = True
|
@@ -711,7 +741,10 @@ def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_ima
|
|
711 |
prompt = prompt[6:]
|
712 |
if deepseek:
|
713 |
prompt = prompt + '. Do not use Latex for math expressions.'
|
714 |
-
|
|
|
|
|
|
|
715 |
gen_image = (uploaded_image_file != '')
|
716 |
if chatType in special_chat_types:
|
717 |
(reply, tokens_in, tokens_out, tokens) = solve(prompt, chatType)
|
@@ -1179,7 +1212,9 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
|
1179 |
button_get_image = gr.Button(value='Upload Image to Analyze')
|
1180 |
speak_output = gr.Button(value="Speak Dialog", visible=True)
|
1181 |
submit_button = gr.Button(value="Submit Prompt/Question")
|
1182 |
-
|
|
|
|
|
1183 |
gr.Markdown('### **Dialog:**')
|
1184 |
#output_window = gr.Text(container=True, label='Dialog')
|
1185 |
output_window = gr.Markdown(container=True)
|
@@ -1192,10 +1227,10 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
|
1192 |
# plot = gr.Plot(visible=False)
|
1193 |
plot = gr.LinePlot(test_plot_df(), x="month", y="value", visible=False, label="Portfolio Value History")
|
1194 |
submit_button.click(chat,
|
1195 |
-
inputs=[prompt_window, user_window, password, history, output_window, model, uploaded_image_file],
|
1196 |
outputs=[history, output_window, prompt_window, model, uploaded_image_file, plot])
|
1197 |
clear_button.click(fn=new_conversation, inputs=user_window,
|
1198 |
-
outputs=[prompt_window, history, output_window, image_window, image_window2, uploaded_image_file, plot])
|
1199 |
audio_widget.stop_recording(fn=transcribe, inputs=[user_window, password, audio_widget],
|
1200 |
outputs=[prompt_window])
|
1201 |
audio_widget.pause_recording(fn=pause_message, outputs=[prompt_window])
|
|
|
23 |
import pytz
|
24 |
import math
|
25 |
import numpy as np
|
|
|
26 |
from pylatexenc.latex2text import LatexNodes2Text
|
27 |
+
import requests
|
28 |
+
from urllib.parse import quote
|
29 |
|
30 |
load_dotenv(override=True)
|
31 |
key = os.getenv('OPENAI_API_KEY')
|
|
|
35 |
pwdList = pwds.split(',')
|
36 |
DEEPSEEK_KEY=os.getenv('DEEPSEEK_KEY')
|
37 |
GROQ_KEY=os.getenv('GROQ_KEY')
|
38 |
+
BRAVE_KEY=os.getenv('BRAVE_KEY')
|
39 |
|
40 |
site = os.getenv('SITE')
|
41 |
if site == 'local':
|
|
|
48 |
dataDir = '/data/'
|
49 |
stock_data_path = dataDir + 'Stocks.txt'
|
50 |
|
51 |
+
braveNewsEndpoint = "https://api.search.brave.com/res/v1/news/search"
|
52 |
|
53 |
speak_file = dataDir + "speek.wav"
|
54 |
|
|
|
60 |
|
61 |
special_chat_types = ['math', 'logic']
|
62 |
|
63 |
+
news_interval_choices = [("None", "None"), ("Day", "pd"), ("Week", "pw"), ("Month", "pm"), ("Year", "py")]
|
64 |
+
|
65 |
class Step(BaseModel):
    """A single intermediate reasoning step returned by the structured
    math/logic solver: the model's explanation plus the step's output."""
    explanation: str
    output: str
|
|
|
70 |
steps: list[Step]
|
71 |
final_answer: str
|
72 |
|
73 |
+
def get_brave_news(query: str, interval: str = 'pd'):
    """Fetch recent news snippets from the Brave News Search API.

    Parameters:
        query: free-text search query; URL-encoded before being sent.
        interval: Brave "freshness" window — 'pd' (past day), 'pw' (week),
            'pm' (month) or 'py' (year). Defaults to the past day.

    Returns:
        One string concatenating each article's title, description and any
        extra snippets, with ' ** ' separating individual articles.

    Raises:
        requests.HTTPError: if the API responds with a non-2xx status
            (e.g. a bad BRAVE_KEY or an exhausted quota).
        requests.Timeout: if the API does not respond within the timeout.
    """
    url = (f'{braveNewsEndpoint}?q={quote(query)}'
           f'&count=20&extra_snippets=true&freshness={interval}')
    response = requests.get(
        url,
        headers={
            "Accept": "application/json",
            "X-Subscription-Token": BRAVE_KEY,
        },
        # Fail fast instead of hanging the chat request if Brave stalls.
        timeout=15,
    )
    # Surface auth/quota errors here rather than as a confusing KeyError below.
    response.raise_for_status()
    pieces = []
    # 'results' can be absent on an empty/odd response; treat that as no news.
    for item in response.json().get('results', []):
        pieces.append(f"{item['title']}: {item['description']} --")
        # extra_snippets is optional per item. The original bare
        # `except: continue` also skipped the ' ** ' article separator
        # whenever snippets were missing; .get() keeps output consistent.
        for snip in item.get('extra_snippets', []):
            pieces.append(snip + ' ')
        pieces.append(' ** ')
    return ''.join(pieces)
|
95 |
+
|
96 |
def Client():
    """Build and return an OpenAI API client using the module-level key."""
    return OpenAI(api_key=key)
|
98 |
|
|
|
613 |
for fpath in flist:
|
614 |
if os.path.exists(fpath):
|
615 |
os.remove(fpath)
|
616 |
+
return [None, [], gr.Markdown(value='', label='Dialog', container=True),
|
617 |
+
gr.Image(visible=False, value=None), gr.Image(visible=False, value=None), '',
|
618 |
+
gr.LinePlot(visible=False), gr.Dropdown(value="None")]
|
619 |
|
620 |
def updatePassword(txt):
|
621 |
password = txt.lower().strip()
|
|
|
639 |
# ref = len(txt[ref:loc]) + len(frag)
|
640 |
# return txt
|
641 |
|
642 |
+
def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_image_file='', plot=None, news_interval = 'none'):
|
643 |
image_gen_model = 'gpt-4o-2024-08-06'
|
644 |
user_window = user_window.lower().strip()
|
645 |
isBoss = False
|
|
|
705 |
using_groq = False
|
706 |
reasoning = False
|
707 |
prompt = prompt.strip()
|
708 |
+
news_prompt = prompt
|
709 |
finish_reason = 'ok'
|
710 |
if prompt.lower().startswith('dsr1 '):
|
711 |
deepseek = True
|
|
|
741 |
prompt = prompt[6:]
|
742 |
if deepseek:
|
743 |
prompt = prompt + '. Do not use Latex for math expressions.'
|
744 |
+
if news_interval != "None" and past==[]:
|
745 |
+
news = get_brave_news(prompt, news_interval)
|
746 |
+
news_prompt = f'{news}\n{prompt}\nGive highest priority to information just provided\n'
|
747 |
+
past.append({"role":"user", "content":news_prompt})
|
748 |
gen_image = (uploaded_image_file != '')
|
749 |
if chatType in special_chat_types:
|
750 |
(reply, tokens_in, tokens_out, tokens) = solve(prompt, chatType)
|
|
|
1212 |
button_get_image = gr.Button(value='Upload Image to Analyze')
|
1213 |
speak_output = gr.Button(value="Speak Dialog", visible=True)
|
1214 |
submit_button = gr.Button(value="Submit Prompt/Question")
|
1215 |
+
with gr.Row():
|
1216 |
+
prompt_window = gr.Textbox(label = "Prompt or Question", scale=5)
|
1217 |
+
news_period = gr.Dropdown(choices=news_interval_choices, interactive=True,label='News Window',scale=1)
|
1218 |
gr.Markdown('### **Dialog:**')
|
1219 |
#output_window = gr.Text(container=True, label='Dialog')
|
1220 |
output_window = gr.Markdown(container=True)
|
|
|
1227 |
# plot = gr.Plot(visible=False)
|
1228 |
plot = gr.LinePlot(test_plot_df(), x="month", y="value", visible=False, label="Portfolio Value History")
|
1229 |
submit_button.click(chat,
|
1230 |
+
inputs=[prompt_window, user_window, password, history, output_window, model, uploaded_image_file, plot, news_period],
|
1231 |
outputs=[history, output_window, prompt_window, model, uploaded_image_file, plot])
|
1232 |
clear_button.click(fn=new_conversation, inputs=user_window,
|
1233 |
+
outputs=[prompt_window, history, output_window, image_window, image_window2, uploaded_image_file, plot, news_period])
|
1234 |
audio_widget.stop_recording(fn=transcribe, inputs=[user_window, password, audio_widget],
|
1235 |
outputs=[prompt_window])
|
1236 |
audio_widget.pause_recording(fn=pause_message, outputs=[prompt_window])
|