lization committed on
Commit a3ccd27 · verified · 1 Parent(s): be5471f

Update app.py

Files changed (1): app.py +31 -38
app.py CHANGED
@@ -1,72 +1,65 @@
-!pip install meteostat
-from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
-from meteostat import Stations, Daily
+from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
 import datetime
 import requests
+import pytz
 import yaml
 from tools.final_answer import FinalAnswerTool
 
 from Gradio_UI import GradioUI
 
-# Below is an example of a tool that does nothing. Amaze us with your creativity !
-@tool
-def World_temperature(lat: float, lon: float) -> str:
-    """Fetches the current temperature in a specified location using latitude & longitude.
-
+# Below is an example of a tool that does nothing. Amaze us with your creativity!
+
+def my_custom_tool(arg1:str, arg2:int)-> str: # it's important to specify the return type
+    # Keep this format for the tool description / args description but feel free to modify the tool
+    """A tool that does nothing yet
     Args:
-        lat: Latitude of the location (e.g., 51.51 for London).
-        lon: Longitude of the location (e.g., -0.13 for London).
-
-    Returns:
-        A string with the current temperature.
+        arg1: the first argument
+        arg2: the second argument
     """
-    try:
-        # Get nearest weather station
-        stations = Stations().nearby(lat, lon).fetch(1)
-        station_id = stations.index[0]
-
-        # Get today's weather data
-        today = datetime.datetime.today()
-        data = Daily(station_id, today, today).fetch()
+    return "What magic will you build ?"
 
-        if not data.empty:
-            temp = data["tavg"].iloc[0]  # Get average temperature
-            return f"The current temperature at ({lat}, {lon}) is {temp}°C."
-        else:
-            return "No temperature data available for this location."
 
+def get_current_time_in_timezone(timezone: str) -> str:
+    """A tool that fetches the current local time in a specified timezone.
+    Args:
+        timezone: A string representing a valid timezone (e.g., 'America/New_York').
+    """
+    try:
+        # Create timezone object
+        tz = pytz.timezone(timezone)
+        # Get current time in that timezone
+        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
+        return f"The current local time in {timezone} is: {local_time}"
     except Exception as e:
-        return f"Error fetching temperature: {str(e)}"
-
-final_answer = FinalAnswerTool()
+        return f"Error fetching time for timezone '{timezone}': {str(e)}"
 
-# If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
-# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
 
-model = HfApiModel(
-    max_tokens=2096,
-    temperature=0.5,
-    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
-    custom_role_conversions=None,
+final_answer = FinalAnswerTool()
+model = HfApiModel(
+    max_tokens=2096,
+    temperature=0.5,
+    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
+    custom_role_conversions=None,
 )
 
 
 # Import tool from Hub
 image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
 
+# Load system prompt from prompt.yaml file
 with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
 
 agent = CodeAgent(
     model=model,
-    tools=[final_answer, World_temperature], ## add your tools here (don't remove final answer)
+    tools=[final_answer], # add your tools here (don't remove final_answer)
     max_steps=6,
     verbosity_level=1,
     grammar=None,
     planning_interval=None,
     name=None,
     description=None,
-    prompt_templates=prompt_templates
+    prompt_templates=prompt_templates # Pass system prompt to CodeAgent
 )
 
 
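After this commit the agent is created with tools=[final_answer] only, so the two helper functions above are defined but never registered. Below is a minimal, hypothetical sketch (not part of the commit) of how the timezone tool could be wired in, reusing the @tool decorator and the HfApiModel/CodeAgent calls already shown in the diff; the FinalAnswerTool from tools/final_answer.py, the prompt templates, and the Gradio UI are left out of the sketch for brevity.

from smolagents import CodeAgent, HfApiModel, tool
import datetime
import pytz

@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    # Same logic as the committed app.py: resolve the timezone and format the current time.
    tz = pytz.timezone(timezone)
    return datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")

model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
)

# In app.py this list would normally stay [final_answer, get_current_time_in_timezone];
# final_answer is omitted here only because tools/final_answer.py is not shown in this sketch.
agent = CodeAgent(
    model=model,
    tools=[get_current_time_in_timezone],
    max_steps=6,
)

# agent.run("What time is it in Tokyo right now?")

Registering the function with @tool is what exposes its signature and docstring to the agent, which is why the diff's comment insists on keeping the Args format in the docstring.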