updated gemini agent
app.py CHANGED
@@ -4,109 +4,32 @@ import requests
 import inspect
 import pandas as pd

-from smolagents import CodeAgent, DuckDuckGoSearchTool, OpenAIServerModel, LiteLLMModel
-from langchain_openai import AzureChatOpenAI
 from typing import Optional, Dict
+from gemini_agent import GeminiAgent

 # (Keep Constants as is)
 # --- Constants ---
 DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"


-class AzureOpenAIServerModel(OpenAIServerModel):
-    """This model connects to an Azure OpenAI deployment.
-
-    Parameters:
-        model_id (`str`):
-            The model identifier to use on the server (e.g. "gpt-3.5-turbo").
-        azure_endpoint (`str`, *optional*):
-            The Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/`
-        api_key (`str`, *optional*):
-            The API key to use for authentication.
-        custom_role_conversions (`Dict[str, str]`, *optional*):
-            Custom role conversion mapping to convert message roles in others.
-            Useful for specific models that do not support specific message roles like "system".
-        **kwargs:
-            Additional keyword arguments to pass to the Azure OpenAI API.
-    """
-
-    def __init__(
-        self,
-        model_id: str,
-        azure_endpoint: Optional[str] = None,
-        api_key: Optional[str] = None,
-        api_version: Optional[str] = None,
-        custom_role_conversions: Optional[Dict[str, str]] = None,
-        **kwargs,
-    ):
-        super().__init__(model_id=model_id, api_key=api_key, custom_role_conversions=custom_role_conversions, **kwargs)
-        # if we've reached this point, it means the openai package is available (base-class check), so go ahead and import it
-        import openai
-
-        self.client = openai.AzureOpenAI(
-            api_key=api_key,
-            api_version=api_version,
-            azure_endpoint=azure_endpoint
-        )
-
 # --- Basic Agent Definition ---
 # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
 class BasicAgent:
     def __init__(self):
         print("Initializing the BasicAgent")
-
-        #
-
-
-        #
-
-
-
-
-        #
-
-
-        # # print("GPT-4o LLM is instantiated")
-        # print(f"{gemini_model_id} LLM instantiated using LiteLLM")
-        # except Exception as e:
-        #     print(f"Error instantiating LLM: {e}")
-        #     print("Please ensure you have installed smolagents[litellm] and google-generativeai,")
-        #     print("and that your GOOGLE_API_KEY environment variable is set.")
-        #     raise e  # Re-raise the exception to fail initialization if model fails
-
-        print(f"AZURE_API_ENDPOINT: {os.getenv('AZURE_API_ENDPOINT')}")
-        print(f"AZURE_DEPLOYMENT_NME: {os.getenv('AZURE_DEPLOYMENT_NME')}")
-        print(f"AZURE_API_VERSION: {os.getenv('AZURE_API_VERSION')}")
-        # Be cautious printing the key, maybe just check if it exists
-        print(f"AZURE_OPENAI_KEY set: {'AZURE_OPENAI_KEY' in os.environ}")
-
-        # model = AzureChatOpenAI(
-        #     azure_endpoint=os.getenv("AZURE_API_ENDPOINT"),
-        #     azure_deployment=os.getenv("AZURE_DEPLOYMENT_NME"),  # or your deployment
-        #     api_key=os.getenv("AZURE_OPENAI_KEY"),
-        #     api_version=os.getenv("AZURE_API_VERSION"),
-        #     temperature=0.5,
-        #     max_tokens=None,
-        #     timeout=None,
-        #     max_retries=2,
-        # )
-        model = AzureOpenAIServerModel(
-            model_id=os.environ.get("AZURE_DEPLOYMENT_NME"),
-            api_key=os.environ.get("AZURE_OPENAI_KEY"),
-            api_version=os.environ.get("AZURE_API_VERSION"),
-            azure_endpoint=os.environ.get("AZURE_API_ENDPOINT")
-        )
-        # Initialize the search tool
-        search_tool = DuckDuckGoSearchTool()
-        print("DuckDuckGoSearchTool instantiated")
+
+        # Load environment variables
+        load_dotenv()
+
+        # Get Gemini API key
+        api_key = os.getenv('GOOGLE_API_KEY')
+        if not api_key:
+            raise ValueError("GOOGLE_API_KEY environment variable not set.")

-        print("Agent is now created")
-        # Initializing Agent
-        self.agent = CodeAgent(
-            model=model,
-            tools=[search_tool]
-        )
+        # Initialize GeminiAgent
+        self.agent = GeminiAgent(api_key=api_key)
+        print("GeminiAgent initialized successfully")

     def __call__(self, question: str) -> str:
         print(f"Agent received question (first 50 chars): {question[:50]}...")
         final_answer = self.agent.run(question)
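
The rewritten BasicAgent delegates all work to GeminiAgent, which lives in a separate gemini_agent.py module not included in this diff. Based only on how it is used here (constructed with an api_key, queried through run(question), which must return a string), a minimal sketch of that module could look like the following; the class name matches the import, but the internals, the google-generativeai backend, and the gemini-1.5-flash model name are assumptions rather than this Space's actual implementation.

# gemini_agent.py -- hypothetical sketch, not the module shipped with this Space
import google.generativeai as genai


class GeminiAgent:
    """Minimal wrapper exposing the interface app.py relies on (assumed)."""

    def __init__(self, api_key: str, model_name: str = "gemini-1.5-flash"):
        # Configure the Gemini client with the key passed in from BasicAgent
        genai.configure(api_key=api_key)
        self.model = genai.GenerativeModel(model_name)

    def run(self, question: str) -> str:
        # Single-turn generation; BasicAgent.__call__ expects a plain string back
        response = self.model.generate_content(question)
        return response.text

With that interface, BasicAgent.__call__ can keep returning self.agent.run(question) unchanged regardless of which model backs the agent.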
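
Since the new __init__ calls load_dotenv() before reading GOOGLE_API_KEY, the key can come either from a repository secret on the Hugging Face Space or from a local .env file during development. A minimal example of the latter (the value is a placeholder, not a real key):

# .env -- local development only; on the Space, set GOOGLE_API_KEY as a secret instead
GOOGLE_API_KEY=your-gemini-api-key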