from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool


import datetime
import sys  # needed for sys.exit() when the endpoint check fails
import requests
import yaml
import os
import pytz  # pytz must also be listed in the CodeAgent's additional_authorized_imports

from agents.tools import tools

from check_endpoint import is_huggingface_endpoint

from Gradio_UI import GradioUI


@tool
def get_the_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.

    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').

    Returns:
        str: A sentence giving the time on the 12-hour clock, including AM/PM.
    """
    try:
        # Create timezone object
        tz = pytz.timezone(timezone)
        # Get current time in that timezone
        local_time = datetime.datetime.now(tz).strftime("%I:%M %p")  # %I for 12-hour clock, %M for minutes, %p for AM/PM
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"



my_id = os.getenv("QWEN_URI")


# Test the endpoint

if is_huggingface_endpoint(my_id):
    print("This is a Hugging Face Inference Endpoint.")
else:
    print("This is NOT a Hugging Face Inference Endpoint.")
    sys.exit(1)  # Stop execution if the endpoint is not valid
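
# `is_huggingface_endpoint` comes from the local check_endpoint module (not shown
# here). A minimal sketch of what such a helper might look like, assuming a live
# Inference Endpoint answers an authenticated GET with a 2xx status (HF_TOKEN is
# an assumed env var name, not something this file defines):
#
#     def is_huggingface_endpoint(url: str) -> bool:
#         if not url:
#             return False
#         try:
#             resp = requests.get(
#                 url,
#                 headers={"Authorization": f"Bearer {os.getenv('HF_TOKEN', '')}"},
#                 timeout=10,
#             )
#             return resp.ok
#         except requests.RequestException:
#             return False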


'''
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id=my_id,
    custom_role_conversions=None,
)
'''
'''
model = LiteLLMModel(
    model_id="gemini/gemini-2.0-flash-exp",
    max_tokens=2096,
    temperature=0.6,
    api_key=os.getenv("LITELLM_API_KEY"),
)
'''
# Import tool from Hub
#image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load prompts.yaml
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

# Load contentprompts.yml
with open("contentprompts.yml", 'r') as stream:
    content_prompts = yaml.safe_load(stream)

combined_prompts = {**prompt_templates, **content_prompts}
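# Note: with {**prompt_templates, **content_prompts}, a key present in both files
# takes its value from contentprompts.yml because it is unpacked last, e.g.
# {**{"k": 1}, **{"k": 2}} == {"k": 2}.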


# Other tools that could be added: web_search, visit_webpage
'''
agent = CodeAgent(
    model=model,
    tools=[final_answer, polite_guard, web_search, get_the_current_time_in_timezone],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=3,
    grammar=None,
    planning_interval=None,
    name="Content Agent",
    description="Evaluates whether text is polite or impolite.",
    prompt_templates=combined_prompts,
    additional_authorized_imports=["pytz"],
)


GradioUI(agent).launch()
'''