LamiaYT committed
Commit bc6672f · 1 Parent(s): dabcfc7
Files changed (1):
  1. app.py +68 -66
app.py CHANGED
@@ -13,6 +13,7 @@ from urllib.parse import urlparse, parse_qs
 
 # --- Constants ---
 DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
+WIKIPEDIA_API_KEY = os.getenv("WIKIPEDIA_API_KEY", "default_key")  # Fallback key if needed
 
 # --- Enhanced Tools with Rate Limiting and Better Answers ---
 
@@ -78,6 +79,67 @@ def smart_web_search(query: str) -> str:
     except Exception as e:
         return f"Search error: {str(e)}"
 
+@tool
+def get_wikipedia_info(query: str) -> str:
+    """
+    Enhanced Wikipedia search with API key support and better result parsing.
+
+    Args:
+        query: Search query string
+
+    Returns:
+        Formatted Wikipedia information
+    """
+    try:
+        # Clean the query
+        clean_query = re.sub(r'[^a-zA-Z0-9 ]', '', query)[:100]
+
+        # First try the Wikipedia API with our key
+        params = {
+            'action': 'query',
+            'format': 'json',
+            'list': 'search',
+            'srsearch': clean_query,
+            'srlimit': 3,
+            'srprop': 'snippet',
+            'utf8': 1
+        }
+
+        if WIKIPEDIA_API_KEY and WIKIPEDIA_API_KEY != "default_key":
+            params['apikey'] = WIKIPEDIA_API_KEY
+
+        response = requests.get(
+            "https://en.wikipedia.org/w/api.php",
+            params=params,
+            timeout=10
+        )
+
+        if response.status_code == 200:
+            data = response.json()
+            results = []
+
+            for item in data.get('query', {}).get('search', []):
+                title = item.get('title', '')
+                snippet = re.sub(r'<[^>]+>', '', item.get('snippet', ''))
+                results.append(f"TITLE: {title}\nSNIPPET: {snippet}")
+
+            if results:
+                return "\n\n".join(results)
+
+        # Fallback to page extracts for exact matches
+        page_title = clean_query.replace(' ', '_')
+        extract_url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{page_title}"
+        extract_response = requests.get(extract_url, timeout=8)
+
+        if extract_response.status_code == 200:
+            extract_data = extract_response.json()
+            return f"TITLE: {extract_data.get('title', '')}\nEXTRACT: {extract_data.get('extract', '')}"
+
+        return f"No Wikipedia results found for: {clean_query}"
+
+    except Exception as e:
+        return f"Wikipedia search error: {str(e)}"
+
 @tool
 def extract_youtube_details(url: str) -> str:
     """
@@ -299,67 +361,6 @@ def solve_advanced_math(problem: str) -> str:
     except Exception as e:
         return f"Math solver error: {str(e)}"
 
-@tool
-def get_detailed_wikipedia(topic: str) -> str:
-    """
-    Get detailed Wikipedia information with better parsing.
-
-    Args:
-        topic: Wikipedia topic to search
-
-    Returns:
-        Detailed Wikipedia information
-    """
-    try:
-        time.sleep(1)  # Rate limiting
-
-        # Clean topic
-        topic_clean = topic.replace(" ", "_").strip()
-
-        # Try direct page access
-        summary_url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{topic_clean}"
-        response = requests.get(summary_url, timeout=12)
-
-        if response.status_code == 200:
-            data = response.json()
-            results = []
-            results.append(f"TITLE: {data.get('title', '')}")
-            results.append(f"EXTRACT: {data.get('extract', '')}")
-
-            # Get page URL for more details
-            page_url = data.get('content_urls', {}).get('desktop', {}).get('page', '')
-            if page_url:
-                results.append(f"URL: {page_url}")
-
-            return "\n".join(results)
-
-        # Fallback to search API
-        search_url = "https://en.wikipedia.org/w/api.php"
-        params = {
-            "action": "query",
-            "format": "json",
-            "list": "search",
-            "srsearch": topic,
-            "srlimit": 5
-        }
-
-        search_response = requests.get(search_url, params=params, timeout=12)
-        if search_response.status_code == 200:
-            search_data = search_response.json()
-
-            results = []
-            for item in search_data.get('query', {}).get('search', [])[:3]:
-                title = item['title']
-                snippet = re.sub(r'<[^>]+>', '', item['snippet'])
-                results.append(f"TITLE: {title}\nSNIPPET: {snippet}")
-
-            return "\n\n".join(results) if results else "No Wikipedia results found"
-
-        return f"Wikipedia lookup failed for: {topic}"
-
-    except Exception as e:
-        return f"Wikipedia error: {str(e)}"
-
 # --- Optimized Agent Class ---
 class OptimizedGAIAAgent:
     def __init__(self):
@@ -367,16 +368,17 @@ class OptimizedGAIAAgent:
 
         self.tools = [
             smart_web_search,
+            get_wikipedia_info,
             extract_youtube_details,
             decode_reversed_text,
-            solve_advanced_math,
-            get_detailed_wikipedia
+            solve_advanced_math
        ]
 
        # Initialize CodeAgent with better error handling
        try:
            self.agent = CodeAgent(
                tools=self.tools,
+                model="gpt-3.5-turbo",  # Added required model parameter
                additional_authorized_imports=["math", "re", "json", "time"]
            )
            print("✅ CodeAgent initialized")
@@ -410,11 +412,11 @@ class OptimizedGAIAAgent:
 
         # Wikipedia-focused searches
         if any(term in question_lower for term in ["who", "what", "when", "where", "wikipedia", "article"]):
-            return get_detailed_wikipedia(question)
+            return get_wikipedia_info(question)
 
         # Olympics questions
         if "olympics" in question_lower or "1928" in question:
-            return get_detailed_wikipedia("1928 Summer Olympics")
+            return get_wikipedia_info("1928 Summer Olympics")
 
         # Default to smart search with delay
         return smart_web_search(question)
@@ -591,7 +593,7 @@ if __name__ == "__main__":
     print("🎯 Starting Optimized GAIA Agent...")
 
     # Environment check
-    env_vars = ["SPACE_ID", "SERPER_API_KEY"]
+    env_vars = ["SPACE_ID", "SERPER_API_KEY", "WIKIPEDIA_API_KEY"]
     for var in env_vars:
        status = "✅" if os.getenv(var) else "⚠️"
        print(f"{status} {var}")
 
13
 
14
  # --- Constants ---
15
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
16
+ WIKIPEDIA_API_KEY = os.getenv("WIKIPEDIA_API_KEY", "default_key") # Fallback key if needed
17
 
18
  # --- Enhanced Tools with Rate Limiting and Better Answers ---
19
 
 
79
  except Exception as e:
80
  return f"Search error: {str(e)}"
81
 
82
+ @tool
83
+ def get_wikipedia_info(query: str) -> str:
84
+ """
85
+ Enhanced Wikipedia search with API key support and better result parsing.
86
+
87
+ Args:
88
+ query: Search query string
89
+
90
+ Returns:
91
+ Formatted Wikipedia information
92
+ """
93
+ try:
94
+ # Clean the query
95
+ clean_query = re.sub(r'[^a-zA-Z0-9 ]', '', query)[:100]
96
+
97
+ # First try the Wikipedia API with our key
98
+ params = {
99
+ 'action': 'query',
100
+ 'format': 'json',
101
+ 'list': 'search',
102
+ 'srsearch': clean_query,
103
+ 'srlimit': 3,
104
+ 'srprop': 'snippet',
105
+ 'utf8': 1
106
+ }
107
+
108
+ if WIKIPEDIA_API_KEY and WIKIPEDIA_API_KEY != "default_key":
109
+ params['apikey'] = WIKIPEDIA_API_KEY
110
+
111
+ response = requests.get(
112
+ "https://en.wikipedia.org/w/api.php",
113
+ params=params,
114
+ timeout=10
115
+ )
116
+
117
+ if response.status_code == 200:
118
+ data = response.json()
119
+ results = []
120
+
121
+ for item in data.get('query', {}).get('search', []):
122
+ title = item.get('title', '')
123
+ snippet = re.sub(r'<[^>]+>', '', item.get('snippet', ''))
124
+ results.append(f"TITLE: {title}\nSNIPPET: {snippet}")
125
+
126
+ if results:
127
+ return "\n\n".join(results)
128
+
129
+ # Fallback to page extracts for exact matches
130
+ page_title = clean_query.replace(' ', '_')
131
+ extract_url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{page_title}"
132
+ extract_response = requests.get(extract_url, timeout=8)
133
+
134
+ if extract_response.status_code == 200:
135
+ extract_data = extract_response.json()
136
+ return f"TITLE: {extract_data.get('title', '')}\nEXTRACT: {extract_data.get('extract', '')}"
137
+
138
+ return f"No Wikipedia results found for: {clean_query}"
139
+
140
+ except Exception as e:
141
+ return f"Wikipedia search error: {str(e)}"
142
+
143
  @tool
144
  def extract_youtube_details(url: str) -> str:
145
  """
 
361
  except Exception as e:
362
  return f"Math solver error: {str(e)}"
363
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
364
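One caveat on the apikey parameter added above: the public MediaWiki Action API at en.wikipedia.org does not authenticate via an apikey query parameter (unknown parameters are ignored), so that branch is likely a no-op. If WIKIPEDIA_API_KEY is meant to be a Wikimedia API Portal token, the usual pattern is a bearer token in the Authorization header against api.wikimedia.org. A hedged sketch under that assumption:

import os
import requests

# Assumption: WIKIPEDIA_API_KEY holds a Wikimedia API Portal access token.
token = os.getenv("WIKIPEDIA_API_KEY")
headers = {"Authorization": f"Bearer {token}"} if token else {}

# API Portal search endpoint; also works unauthenticated at lower rate limits.
resp = requests.get(
    "https://api.wikimedia.org/core/v1/wikipedia/en/search/page",
    params={"q": "1928 Summer Olympics", "limit": 3},
    headers=headers,
    timeout=10,
)
for page in resp.json().get("pages", []):  # response schema per the API Portal docs
    print(page.get("title", ""))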
  # --- Optimized Agent Class ---
365
  class OptimizedGAIAAgent:
366
  def __init__(self):
 
368
 
369
  self.tools = [
370
  smart_web_search,
371
+ get_wikipedia_info,
372
  extract_youtube_details,
373
  decode_reversed_text,
374
+ solve_advanced_math
 
375
  ]
376
 
377
  # Initialize CodeAgent with better error handling
378
  try:
379
  self.agent = CodeAgent(
380
  tools=self.tools,
381
+ model="gpt-3.5-turbo", # Added required model parameter
382
  additional_authorized_imports=["math", "re", "json", "time"]
383
  )
384
  print("✅ CodeAgent initialized")
 
412
 
413
  # Wikipedia-focused searches
414
  if any(term in question_lower for term in ["who", "what", "when", "where", "wikipedia", "article"]):
415
+ return get_wikipedia_info(question)
416
 
417
  # Olympics questions
418
  if "olympics" in question_lower or "1928" in question:
419
+ return get_wikipedia_info("1928 Summer Olympics")
420
 
421
  # Default to smart search with delay
422
  return smart_web_search(question)
 
593
  print("🎯 Starting Optimized GAIA Agent...")
594
 
595
  # Environment check
596
+ env_vars = ["SPACE_ID", "SERPER_API_KEY", "WIKIPEDIA_API_KEY"]
597
  for var in env_vars:
598
  status = "✅" if os.getenv(var) else "⚠️"
599
  print(f"{status} {var}")