Update agent.py
agent.py (CHANGED)
@@ -3,8 +3,6 @@ from langchain_community.tools.tavily_search import TavilySearchResults
 from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
 
 
-
-
 class add(Tool):
     name = "add"
     description = """
@@ -128,7 +126,7 @@ class modulo(Tool):
     def forward(self, a: int, b: int):
         return a % b
 
-class WikipediaSearchTool(tool):
+class WikipediaSearchTool(Tool):
     name = "wikipedia_search_tool"
 
     description = """
@@ -155,7 +153,7 @@ class WikipediaSearchTool(tool):
             ])
         return {"wikipedia_results": condensed_docs}
 
-class TavilySearchTool(tool):
+class TavilySearchTool(Tool):
     name = "tavily_search_tool"
 
     description = """
@@ -183,20 +181,32 @@ class TavilySearchTool(tool):
         return {"web_search_results": condensed_docs}
 
 
-
-
-
+class ArvixSearchTool(Tool):
+    name = "arvix_search_tool"
+
+    description = """
+    Search arxiv for a query and return maximum 3 result.
 
-
-
-
-
+    Args:
+        query: The search query.
+    """
+
+    inputs = {
+        "query":{
+            "type":"string",
+            "description":"the search query"
+        }
+    }
+    output_type = "string"
+
+    def forward(self, query: str) -> str:
+        documents = ArxivLoader(query=query, load_max_docs=3).load()
+        condensed_docs = "\n\n---\n\n".join(
             [
                 f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content[:1000]}\n</Document>'
                 for doc in search_docs
             ])
-        return {"
-
+        return {"arvix_search_results": formatted_search_docs}
 
 
 tools=[
@@ -205,20 +215,33 @@ tools=[
     multiply(),
     divide(),
     modulo(),
-
-
+    WikipediaSearchTool(),
+    ArvixSearchTool(),
+    TavilySearchTool()
 ]
 
 model = LiteLLMModel(
-
+    model_id='ollama_chat/gemma3:27b',
+    api_base="http://127.0.0.1:11434",
+    num_ctx=8192
 )
 
+def delay_execution_10(pagent, **kwargs) -> bool:
+    """
+    Delays the execution for 10 seconds.
+    """
+    time.sleep(10)
+    return True
+
 def create_agent():
     agent = CodeAgent(
         model = model,
         tools = tools,
         max_steps=10,
-        verbosity_level=2
+        verbosity_level=2,
+        additional_authorized_imports=['*'],
+        planning_interval=5,
+        step_callbacks=[delay_execution_10]
     )
 
     return agent
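
Note that the ArvixSearchTool.forward added above assigns `documents` and `condensed_docs` but then iterates over `search_docs` and returns `formatted_search_docs`, neither of which is defined in the method, and it returns a dict although `output_type` is declared as "string". A minimal corrected sketch of the same logic, written here as a standalone function and reusing the `ArxivLoader` call and `<Document>` formatting from this commit, could look like:

from langchain_community.document_loaders import ArxivLoader

def arvix_search(query: str) -> str:
    """Sketch only: ArvixSearchTool.forward with consistent variable names,
    returning a plain string to match output_type = "string"."""
    # Load up to 3 arXiv results for the query.
    search_docs = ArxivLoader(query=query, load_max_docs=3).load()
    # The <Document> formatting and metadata keys mirror the commit's other tools.
    return "\n\n---\n\n".join(
        f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n'
        f'{doc.page_content[:1000]}\n</Document>'
        for doc in search_docs
    )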
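
For reference, a usage sketch (not part of the commit): it assumes smolagents' `CodeAgent.run()`, an Ollama server already serving `gemma3:27b` at the `api_base` above, and that `time` is imported near the top of agent.py (the hunks shown start at line 3, so the earlier imports are not visible here).

# Usage sketch only, under the assumptions stated above.
if __name__ == "__main__":
    agent = create_agent()
    # Each agent step pauses 10 seconds via the delay_execution_10 step callback,
    # so even a short task takes a while to finish.
    answer = agent.run("What is 25 modulo 7?")
    print(answer)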