Aly Mostafa committed on
Commit
8c6e3b3
·
unverified ·
1 Parent(s): c6c86f0

Add files via upload

Browse files
Files changed (2) hide show
  1. alfred.py +27 -0
  2. tools.py +179 -0
alfred.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# alfred_streamlit.py
"""Streamlit chat front-end for the Alfred LangGraph agent defined in tools.py."""
import streamlit as st
from langchain_core.messages import HumanMessage
from tools import alfred  # Import your LangGraph agent

st.set_page_config(page_title="🕵️ Alfred - AI Assistant", page_icon="🎩")

st.title("🎩 Alfred - Your AI Assistant")
st.markdown("Ask Alfred anything. He’s connected to weather, search, model stats, and even your guest list!")

# Persist the conversation across Streamlit reruns.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Chat input
user_input = st.chat_input("Ask Alfred...")

if user_input:
    st.session_state.chat_history.append(HumanMessage(content=user_input))
    with st.spinner("Alfred is thinking..."):
        # Run the whole history through the agent and keep only its final
        # reply (intermediate tool messages are not stored).
        # Fix: dropped the unused `ai_response` local the original assigned.
        response = alfred.invoke({"messages": st.session_state.chat_history})
        st.session_state.chat_history.append(response["messages"][-1])

# Display chat history (including the reply appended just above).
for msg in st.session_state.chat_history:
    role = "🤵 Alfred" if msg.type == "ai" else "🧑 You"
    st.chat_message(role).markdown(msg.content)
tools.py ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.tools import DuckDuckGoSearchRun
2
+ from typing import TypedDict,Annotated
3
+ from langgraph.graph.message import add_messages
4
+ from langchain_core.messages import AnyMessage ,HumanMessage,AIMessage
5
+ from langgraph.prebuilt import ToolNode
6
+ from langgraph.graph import START,StateGraph
7
+ from langgraph.prebuilt import tools_condition
8
+ from langchain_groq import ChatGroq
9
+ from langchain.tools import Tool
10
+ from huggingface_hub import list_models
11
+ import random
12
+ from dotenv import load_dotenv
13
+ import os
14
+ from langchain_community.utilities import SerpAPIWrapper
15
# Load environment variables from a local .env file, if present.
load_dotenv()

# Resolve API keys once at import time.
# Fix: `os.environ[...] = os.getenv(...)` raises TypeError when the variable
# is missing (environ values must be str), so only mirror the key back into
# the environment when it actually exists.
groq_api_key = os.getenv("GROQ_API_KEY")
if groq_api_key is not None:
    os.environ["GROQ_API_KEY"] = groq_api_key
#os.environ["SERPAPI_API_KEY"]=os.getenv("SERPAPI_API_KEY")
serp_api_key = os.getenv("SERPAPI_API_KEY")
20
+
21
# Web search tool backed by SerpAPI.
# Fix: removed the duplicate `from langchain_community.utilities import
# SerpAPIWrapper` — it is already imported at the top of this file.
search = SerpAPIWrapper(serpapi_api_key=serp_api_key)
search_tool = Tool(
    # Fix: tool names must match ^[a-zA-Z0-9_-]+$ for OpenAI-compatible
    # function calling (which Groq uses); "SerpAPI Search" with a space is
    # rejected by the API, so use an underscore name instead.
    name="serpapi_search",
    func=search.run,
    description="Search the web using SerpAPI",
)
29
+
30
+
31
+
32
+
33
+
34
### weather tool
def get_weather_info(location: str) -> str:
    """Return a randomly chosen dummy weather report for *location*."""
    # Fixed pool of fake forecasts; one entry is picked at random per call.
    forecasts = (
        {"condition": "Rainy", "temp_c": 15},
        {"condition": "Clear", "temp_c": 25},
        {"condition": "Windy", "temp_c": 20},
    )
    picked = random.choice(forecasts)
    return f"Weather in {location}: {picked['condition']}, {picked['temp_c']}°C"
46
+
47
# Wrap the weather function as a LangChain Tool for the agent.
weather_info_tool = Tool(
    name="get_weather_info",
    func=get_weather_info,
    description="Fetches dummy weather information for a given location.",
)
53
+
54
+
55
+
56
## most downloaded
def get_hub_stats(author: str) -> str:
    """Return a sentence naming *author*'s most-downloaded Hub model."""
    try:
        # Ask the Hub for the single top model, sorted by downloads (desc).
        top = next(
            iter(list_models(author=author, sort="downloads", direction=-1, limit=1)),
            None,
        )
        if top is None:
            return f"No models found for author {author}."
        return f"The most downloaded model by {author} is {top.id} with {top.downloads:,} downloads."
    except Exception as e:
        # Network / API failures are reported as text so the agent can relay them.
        return f"Error fetching models for {author}: {str(e)}"
70
+
71
# Tool wrapper exposing get_hub_stats to the agent.
hub_stats_tool = Tool(
    name="get_hub_stats",
    func=get_hub_stats,
    description="Fetches the most downloaded model from a specific author on the Hugging Face Hub.",
)
77
+
78
+
79
+
80
+
81
+
82
+
83
+
84
+
85
+
86
+
87
### langchain — guest-list retrieval setup
# Fix: dropped the block of duplicate imports (TypedDict, Annotated,
# add_messages, message classes, ToolNode, StateGraph, tools_condition,
# ChatGroq, dotenv, os) — all already imported at the top of this file —
# and the redundant second load_dotenv()/GROQ_API_KEY re-assignment.
# Only the names that are genuinely new at this point remain.
import datasets
from langchain.docstore.document import Document
from langchain_community.retrievers import BM25Retriever
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace

# Load the gala guest dataset.
# NOTE(review): this runs at import time and needs network access on the
# first call — confirm that is acceptable for deployment.
guest_dataset = datasets.load_dataset("agents-course/unit3-invitees", split="train")

# Convert dataset entries into Document objects for BM25 retrieval.
docs = [
    Document(
        page_content="\n".join([
            f"Name: {guest['name']}",
            f"Relation: {guest['relation']}",
            f"Description: {guest['description']}",
            f"Email: {guest['email']}"
        ]),
        metadata={"name": guest["name"]}
    )
    for guest in guest_dataset
]
121
+
122
+
123
+
124
+
125
+
126
+
127
# BM25 keyword retriever over the guest documents.
bm25_retriever = BM25Retriever.from_documents(docs)

def extract_text(query: str) -> str:
    """Retrieves detailed information about gala guests based on their name or relation."""
    hits = bm25_retriever.invoke(query)
    if not hits:
        return "No matching guest information found."
    # Join the top three matches, separated by blank lines.
    return "\n\n".join(doc.page_content for doc in hits[:3])

guest_info_tool = Tool(
    name="guest_info_retriever",
    func=extract_text,
    description="Retrieves detailed information about gala guests based on their name or relation."
)
142
+
143
# Chat model with the four tools bound for function calling.
llm = ChatGroq(model="Gemma2-9b-It", groq_api_key=groq_api_key)
tools = [guest_info_tool, search_tool, weather_info_tool, hub_stats_tool]
llm_with_tools = llm.bind_tools(tools)

# Agent state: a message list merged via LangGraph's add_messages reducer.
class AgentState(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]

def assistant(state: AgentState):
    """LLM node: answer (or emit tool calls) given the conversation so far."""
    reply = llm_with_tools.invoke(state["messages"])
    return {"messages": [reply]}

## The graph
builder = StateGraph(AgentState)

# Nodes: the LLM step and the tool executor.
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))

# Edges: start at the assistant; when the latest message carries tool
# calls, route to the tools node, otherwise finish; tool results loop
# back into the assistant.
builder.add_edge(START, "assistant")
builder.add_conditional_edges(
    "assistant",
    tools_condition,
)
builder.add_edge("tools", "assistant")
alfred = builder.compile()
174
+
175
# Smoke test — only runs when this file is executed directly.
# Fix: previously this invoke ran at module import time, so
# `from tools import alfred` (as alfred.py does) fired a live LLM/API call
# before the UI even loaded, and crashed outright when keys were missing.
if __name__ == "__main__":
    messages = [HumanMessage(content="Tell me about our guest named 'Lady Ada Lovelace'.")]
    response = alfred.invoke({"messages": messages})
    print(response["messages"][-1].content)