from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
from meteostat import Stations, Daily
import requests
import yaml
import datetime
from tools.final_answer import FinalAnswerTool


from Gradio_UI import GradioUI

# Tool that looks up the current temperature for a location using the nearest Meteostat station.
@tool
def World_temperature(lat: float, lon: float) -> str:
    """Fetches the current temperature in a specified location using latitude & longitude.
    
    Args:
        lat: Latitude of the location (e.g., 51.51 for London).
        lon: Longitude of the location (e.g., -0.13 for London).
    
    Returns:
        A string with the current temperature.
    """
    try:
        # Get nearest weather station
        stations = Stations().nearby(lat, lon).fetch(1)
        station_id = stations.index[0]  # ID of the nearest station

        # Get today's weather data
        today = datetime.datetime.today()
        data = Daily(station_id, today, today).fetch()

        if not data.empty:
            temp = data["tavg"].iloc[0]  # Get average temperature
            return f"The current temperature at ({lat}, {lon}) is {temp}°C."
        else:
            return "No temperature data available for this location."

    except Exception as e:
        return f"Error fetching temperature: {str(e)}"


final_answer = FinalAnswerTool()

# If the agent does not answer, the model is likely overloaded; switch to another model
# or to the following Hugging Face endpoint, which also serves Qwen2.5 Coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'

model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
    custom_role_conversions=None,
)


# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)
    
agent = CodeAgent(
    model=model,
    tools=[final_answer, World_temperature],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates
)


GradioUI(agent).launch()