susmitsil committed
Commit 9d0dd09 · verified · 1 Parent(s): 192da2a
Files changed (1)
  1. gemini_agent.py +7 -32
gemini_agent.py CHANGED
@@ -1,10 +1,5 @@
-from typing import List
-from langchain_community.tools.ddg_search import DuckDuckGoSearchRun
-from langchain.agents import AgentType, initialize_agent
-from langchain.tools.base import BaseTool
 from langchain_google_genai import ChatGoogleGenerativeAI
 from langchain_core.messages import SystemMessage
-from langchain.memory import ConversationBufferMemory
 
 class GeminiAgent:
     def __init__(self, api_key: str, model_name: str = "gemini-2.0-flash"):
@@ -21,39 +16,19 @@ class GeminiAgent:
 
     def _setup_agent(self):
         # Initialize model with system message
-        model = ChatGoogleGenerativeAI(
+        return ChatGoogleGenerativeAI(
             model=self.model_name,
             google_api_key=self.api_key,
-            temperature=0,  # Lower temperature for faster, more focused responses
-            max_output_tokens=200,  # Limit response length
-            convert_system_message_to_human=True,  # Faster processing of system message
-            stream=True,  # Enable streaming for faster initial response
-            system_message=SystemMessage(content="You are a concise AI assistant. Provide a short and accurate answer. Preferable answer should be in one word or line. Unless if query asked expects an elaborate answer.")
-        )
-
-        # Setup tools
-        tools: List[BaseTool] = [DuckDuckGoSearchRun()]
-
-        # Setup memory
-        memory = ConversationBufferMemory(
-            memory_key="chat_history",
-            return_messages=True
-        )
-
-        # Create and return agent
-        return initialize_agent(
-            tools,
-            model,
-            agent=AgentType.CHAT_CONVERSATIONAL_REACT_DESCRIPTION,
-            memory=memory,
-            verbose=False,
-            handle_parsing_errors=True
+            temperature=0,  # Lower temperature for focused responses
+            max_output_tokens=2000,  # Increased for more detailed responses
+            convert_system_message_to_human=True,
+            system_message=SystemMessage(content="You are a helpful AI assistant. For the Wikipedia question, use the latest 2022 English Wikipedia version as your knowledge source. For the YouTube video question, analyze the video content carefully and count the maximum number of different bird species visible simultaneously in any frame.")
         )
 
     def run(self, query: str) -> str:
         try:
-            result = self.agent.invoke({"input": query})
-            return result["output"]
+            response = self.agent.invoke(query)
+            return response.content
         except Exception as e:
             return f"Error: {e}"
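
For reference, a minimal usage sketch of the simplified GeminiAgent after this commit; the GOOGLE_API_KEY environment variable name and the example query are assumptions for illustration, not part of the commit:

import os

from gemini_agent import GeminiAgent

# Construct the agent with a Gemini API key (assumed here to be exported as GOOGLE_API_KEY).
agent = GeminiAgent(api_key=os.environ["GOOGLE_API_KEY"])

# run() invokes the underlying ChatGoogleGenerativeAI model directly and returns
# response.content, or an "Error: ..." string if the call raises.
print(agent.run("What is the capital of France?"))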