from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
import os
from huggingface_hub import InferenceClient
from Gradio_UI import GradioUI
from dotenv import load_dotenv
import random  # Added for tell_joke()

load_dotenv()

hf_token = os.getenv("HF_TOKEN")
alpha_vantage_api_key = os.getenv(
    "ALPHA_VANTAGE_API_KEY")  # Load Alpha Vantage API key

# Custom tool example


@tool
def my_custom_tool(arg1: str, arg2: int) -> str:
    """A tool that does nothing yet 
    Args:
        arg1: the first argument
        arg2: the second argument
    """
    return "What magic will you build ?"


def get_weather_report_at_coordinates(coordinates, date_time):
    # Dummy function, returns [temperature in °C, risk of rain 0-1, wave height in m]
    return [28.0, 0.35, 0.85]


def convert_location_to_coordinates(location):
    # Returns dummy coordinates
    return [3.3, -42.0]


@tool
def get_weather_api(location: str, date_time: str) -> str:
    """
    Returns the weather report.
    Args:
        location: the name of the place that you want the weather for.
        date_time: the date and time for which you want the report.
    """
    lon, lat = convert_location_to_coordinates(location)
    date_time = datetime.datetime.strptime(date_time, "%Y-%m-%d %H:%M:%S")
    return str(get_weather_report_at_coordinates((lon, lat), date_time))
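# Illustrative call (based on the dummy helpers above, so the numbers are fixed):
# get_weather_api("Lisbon", "2025-01-01 12:00:00")
# -> "[28.0, 0.35, 0.85]"  i.e. temperature in °C, risk of rain 0-1, wave height in m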


user_data = {}


def update_personality(name: str, personality: str) -> str:
    """Stores the user's personality traits before predicting their future."""
    user_data[name] = personality
    return f"Great! Thanks {name}, I've updated your personality traits. Now ask me about your future."
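# Note: update_personality is a plain helper and is not registered with the agent.
# A minimal sketch (an assumption, not part of the original wiring) of how it could be
# exposed as a tool; the name `remember_personality` is hypothetical:
#
# @tool
# def remember_personality(name: str, personality: str) -> str:
#     """Stores the user's personality traits for later predictions.
#     Args:
#         name: The user's name.
#         personality: A description of the user's personality traits.
#     """
#     return update_personality(name, personality)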


client = InferenceClient(model="Qwen/Qwen2.5-Coder-32B-Instruct")


@tool
def predict_future_with_model(name: str, personality: str) -> str:
    """
    Returns a fun and futuristic AI-generated prediction.
    Args:
        name: The user's name.
        personality: A description of the user's personality traits.
    """
    prompt = f"""
    Given the name '{name}' and personality traits '{personality}', generate a fun, futuristic prediction for their life.
    Your response should include:
    - A career path
    - A major life event
    - The number of kids they might have
    - A quirky or funny twist related to their personality
    Keep it engaging, futuristic, and a little humorous!
    """
    try:
        response = client.text_generation(prompt, max_new_tokens=100)
        return f"🔮 **Future Prediction for {name}:**\n{response}"
    except Exception as e:
        return f"Oops! I couldn't predict the future this time. Error: {str(e)}"


@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        tz = pytz.timezone(timezone)
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
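# Illustrative calls:
# get_current_time_in_timezone("Europe/Paris")  -> "The current local time in Europe/Paris is: ..."
# get_current_time_in_timezone("Not/AZone")     -> "Error fetching time for timezone 'Not/AZone': ..."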


@tool
def get_financial_price(ticker: str) -> str:
    """
    Fetches the real-time price of a stock, cryptocurrency, or financial product using Alpha Vantage API.
    Args:
        ticker: The ticker symbol (e.g., 'AAPL' for Apple stock, 'BTCUSD' for Bitcoin/USD).
    """
    if not alpha_vantage_api_key:
        return "Error: Alpha Vantage API key not found. Please set ALPHA_VANTAGE_API_KEY in your .env file."

    # Determine if it's a crypto or stock based on ticker format (simplified logic;
    # assumes a 3-letter base symbol such as BTC or ETH, e.g. BTCUSD, ETHBTC)
    is_crypto = len(ticker) > 5 and ticker.endswith(("USD", "BTC", "ETH"))
    if is_crypto:
        url = f"https://www.alphavantage.co/query?function=CURRENCY_EXCHANGE_RATE&from_currency={ticker[:3]}&to_currency={ticker[3:]}&apikey={alpha_vantage_api_key}"
    else:
        url = f"https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol={ticker}&apikey={alpha_vantage_api_key}"

    try:
        response = requests.get(url, timeout=10)  # Timeout so a slow API call doesn't hang the agent
        data = response.json()

        if is_crypto:
            if "Realtime Currency Exchange Rate" in data:
                price = data["Realtime Currency Exchange Rate"]["5. Exchange Rate"]
                return f"The current price of {ticker[:3]} in {ticker[3:]} is {float(price):.2f} {ticker[3:]}."
            else:
                return f"Error: Could not fetch crypto price for {ticker}. Check the ticker symbol."
        else:
            if "Global Quote" in data and "05. price" in data["Global Quote"]:
                price = data["Global Quote"]["05. price"]
                return f"The current price of {ticker} is ${float(price):.2f} USD."
            else:
                return f"Error: Could not fetch stock price for {ticker}. Check the ticker symbol or API limits."

    except Exception as e:
        return f"Error fetching price for {ticker}: {str(e)}"
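# Usage sketch (illustrative; actual prices depend on Alpha Vantage availability and rate limits):
# get_financial_price("AAPL")    -> "The current price of AAPL is $123.45 USD."
# get_financial_price("BTCUSD")  -> "The current price of BTC in USD is 12345.67 USD."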


@tool
def tell_joke() -> str:
    """Returns a random stored joke."""
    jokes = [
        "Why do we tell actors to 'break a leg?' Because every play has a cast.",
        "I told my wife she should embrace her mistakes. She gave me a hug.",
        "I'm reading a book on the history of glue. I just can't seem to put it down.",
        "I would tell you a joke about an elevator, but it's an uplifting experience.",
        "I told my computer I needed a break and now it won't stop sending me vacation ads.",
        "I used to play piano by ear, but now I use my hands"
    ]
    return random.choice(jokes)


final_answer = FinalAnswerTool()

model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    custom_role_conversions=None,
)

# Text-to-image tool loaded from the Hub (not currently registered with the agent below)
image_generation_tool = load_tool(
    "agents-course/text-to-image", trust_remote_code=True)

with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[
        my_custom_tool,
        get_weather_api,
        predict_future_with_model,
        get_current_time_in_timezone,
        get_financial_price,  # New tool added here
        tell_joke,
        final_answer
    ],
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates
)
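# Optional smoke test (sketch): the agent can be exercised directly before the UI starts.
# Left commented out so the Space boots straight into Gradio.
# print(agent.run("What is the current time in America/New_York?"))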

GradioUI(agent).launch()