Update app.py
app.py
CHANGED
@@ -60,92 +60,6 @@ def hf_chat(api_key, model, text):
 
     return stream.choices[0].message.content
 
-class GoogleSearchTool(Tool):
-    name = "web_search"
-    description = """Performs a google web search for your query then returns a string of the top search results."""
-    inputs = {
-        "query": {"type": "string", "description": "The search query to perform."},
-        "filter_year": {
-            "type": "integer",
-            "description": "Optionally restrict results to a certain year",
-            "nullable": True,
-        },
-    }
-    output_type = "string"
-
-    def __init__(self):
-        super().__init__(self)
-        import os
-
-        self.serpapi_key = os.getenv("SERPER_API_KEY")
-
-    def forward(self, query: str, filter_year: Optional[int] = None) -> str:
-        import requests
-
-        if self.serpapi_key is None:
-            raise ValueError("Missing SerpAPI key. Make sure you have 'SERPER_API_KEY' in your env variables.")
-
-        params = {
-            "engine": "google",
-            "q": query,
-            "api_key": self.serpapi_key,
-            "google_domain": "google.com",
-        }
-
-        headers = {
-            'X-API-KEY': self.serpapi_key,
-            'Content-Type': 'application/json'
-        }
-
-        if filter_year is not None:
-            params["tbs"] = f"cdr:1,cd_min:01/01/{filter_year},cd_max:12/31/{filter_year}"
-
-        response = requests.request("POST", "https://google.serper.dev/search", headers=headers, data=json.dumps(params))
-
-
-        if response.status_code == 200:
-            results = response.json()
-        else:
-            raise ValueError(response.json())
-
-        if "organic" not in results.keys():
-            print("REZZZ", results.keys())
-            if filter_year is not None:
-                raise Exception(
-                    f"No results found for query: '{query}' with filtering on year={filter_year}. Use a less restrictive query or do not filter on year."
-                )
-            else:
-                raise Exception(f"No results found for query: '{query}'. Use a less restrictive query.")
-        if len(results["organic"]) == 0:
-            year_filter_message = f" with filter year={filter_year}" if filter_year is not None else ""
-            return f"No results found for '{query}'{year_filter_message}. Try with a more general query, or remove the year filter."
-
-        web_snippets = []
-        if "organic" in results:
-            for idx, page in enumerate(results["organic"]):
-                date_published = ""
-                if "date" in page:
-                    date_published = "\nDate published: " + page["date"]
-
-                source = ""
-                if "source" in page:
-                    source = "\nSource: " + page["source"]
-
-                snippet = ""
-                if "snippet" in page:
-                    snippet = "\n" + page["snippet"]
-
-                redacted_version = f"{idx}. [{page['title']}]({page['link']}){date_published}{source}\n{snippet}"
-
-                redacted_version = redacted_version.replace("Your browser can't play this video.", "")
-                web_snippets.append(redacted_version)
-
-        return "## Search Results\n" + "\n\n".join(web_snippets)
-
-# web_search = GoogleSearchTool()
-
-# print(web_search(query="Donald Trump news"))
-# quit()
 AUTHORIZED_IMPORTS = [
     "requests",
     "zipfile",
@@ -193,6 +107,7 @@ BROWSER_CONFIG = {
 
 os.makedirs(f"./{BROWSER_CONFIG['downloads_folder']}", exist_ok=True)
 
+
 model = LiteLLMModel(
     #"deepseek-r1-distill-qwen-32b",
     #"llama-3.3-70b-versatile",
@@ -202,6 +117,13 @@ model = LiteLLMModel(
     max_completion_tokens=500,
     api_key=os.getenv("OPENAI_API_KEY")#Groq API
 )
+from smolagents import OpenAIServerModel
+model = OpenAIServerModel(
+    model_id="gemma2-9b-it",
+    api_base="https://api.groq.com/openai/v1",
+    api_key=os.environ["OPENAI_API_KEY"],
+    max_completion_tokens=500,
+)
 model._flatten_messages_as_text = True
 
 
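Below is a minimal sketch of how the OpenAIServerModel configuration added in this commit might be driven end to end. The model id, the Groq OpenAI-compatible endpoint, the OPENAI_API_KEY environment variable, and the _flatten_messages_as_text assignment are taken from the diff above; the CodeAgent wiring and the sample prompt are illustrative assumptions and are not part of app.py.

# Illustrative sketch (not part of this commit): exercising the newly added
# OpenAIServerModel against Groq's OpenAI-compatible endpoint.
import os

from smolagents import CodeAgent, OpenAIServerModel

model = OpenAIServerModel(
    model_id="gemma2-9b-it",
    api_base="https://api.groq.com/openai/v1",
    api_key=os.environ["OPENAI_API_KEY"],  # app.py stores the Groq key under OPENAI_API_KEY
    max_completion_tokens=500,
)
model._flatten_messages_as_text = True  # mirrors the assignment app.py makes after construction

# Hypothetical agent wiring; app.py's actual agent and tool setup is not shown in this diff.
agent = CodeAgent(tools=[], model=model)
print(agent.run("Give a one-sentence summary of what a web-browsing agent does."))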