from typing import TypedDict, Annotated

import os
import random

import datasets
from dotenv import load_dotenv
from huggingface_hub import list_models
from langchain.docstore.document import Document
from langchain.tools import Tool
from langchain_community.retrievers import BM25Retriever
from langchain_community.utilities import SerpAPIWrapper
from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
from langchain_groq import ChatGroq
from langgraph.graph import START, StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition

# Load API keys from the local .env file
load_dotenv()
os.environ["GROQ_API_KEY"] = os.getenv("GROQ_API_KEY")
# os.environ["SERPAPI_API_KEY"] = os.getenv("SERPAPI_API_KEY")
groq_api_key = os.getenv("GROQ_API_KEY")
serp_api_key = os.getenv("SERPAPI_API_KEY")

### web search tool
search = SerpAPIWrapper(serpapi_api_key=serp_api_key)
search_tool = Tool(
    name="serpapi_search",  # no spaces: tool names must be valid function names for tool calling
    func=search.run,
    description="Search the web using SerpAPI"
)
### weather tool
def get_weather_info(location: str) -> str:
    """Fetches dummy weather information for a given location."""
    # Dummy weather data
    weather_conditions = [
        {"condition": "Rainy", "temp_c": 15},
        {"condition": "Clear", "temp_c": 25},
        {"condition": "Windy", "temp_c": 20}
    ]
    # Randomly select a weather condition
    data = random.choice(weather_conditions)
    return f"Weather in {location}: {data['condition']}, {data['temp_c']}°C"

# Initialize the tool
weather_info_tool = Tool(
    name="get_weather_info",
    func=get_weather_info,
    description="Fetches dummy weather information for a given location."
)
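
# Quick sanity check for the weather tool (hypothetical location), kept commented out
# so importing this module stays side-effect free:
# print(get_weather_info("Paris"))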
## most downloaded model tool
def get_hub_stats(author: str) -> str:
    """Fetches the most downloaded model from a specific author on the Hugging Face Hub."""
    try:
        # List models from the specified author, sorted by downloads
        models = list(list_models(author=author, sort="downloads", direction=-1, limit=1))
        if models:
            model = models[0]
            return f"The most downloaded model by {author} is {model.id} with {model.downloads:,} downloads."
        else:
            return f"No models found for author {author}."
    except Exception as e:
        return f"Error fetching models for {author}: {str(e)}"

# Initialize the tool
hub_stats_tool = Tool(
    name="get_hub_stats",
    func=get_hub_stats,
    description="Fetches the most downloaded model from a specific author on the Hugging Face Hub."
)
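
# Quick sanity check for the Hub stats tool ("facebook" is just an example author id),
# kept commented out so importing this module stays side-effect free:
# print(get_hub_stats("facebook"))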
### guest info retriever
# Load the dataset
guest_dataset = datasets.load_dataset("agents-course/unit3-invitees", split="train")

# Convert dataset entries into Document objects
docs = [
    Document(
        page_content="\n".join([
            f"Name: {guest['name']}",
            f"Relation: {guest['relation']}",
            f"Description: {guest['description']}",
            f"Email: {guest['email']}"
        ]),
        metadata={"name": guest["name"]}
    )
    for guest in guest_dataset
]

bm25_retriever = BM25Retriever.from_documents(docs)

def extract_text(query: str) -> str:
    """Retrieves detailed information about gala guests based on their name or relation."""
    results = bm25_retriever.invoke(query)
    if results:
        return "\n\n".join([doc.page_content for doc in results[:3]])
    else:
        return "No matching guest information found."

guest_info_tool = Tool(
    name="guest_info_retriever",
    func=extract_text,
    description="Retrieves detailed information about gala guests based on their name or relation."
)
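
# Quick sanity check for the retriever (guest name taken from the sample invocation below),
# kept commented out so importing this module stays side-effect free:
# print(extract_text("Lady Ada Lovelace"))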
# Generate the chat model and bind the tools to it
llm = ChatGroq(model="Gemma2-9b-It", groq_api_key=groq_api_key)
tools = [guest_info_tool, search_tool, weather_info_tool, hub_stats_tool]
llm_with_tools = llm.bind_tools(tools)

# Generate the AgentState and Agent graph
class AgentState(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]

def assistant(state: AgentState):
    # Invoke the tool-enabled model on the accumulated message history
    return {
        "messages": [llm_with_tools.invoke(state["messages"])],
    }
## The graph
builder = StateGraph(AgentState)

# Define nodes: these do the work
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))

# Define edges: these determine how the control flow moves
builder.add_edge(START, "assistant")
builder.add_conditional_edges(
    "assistant",
    # If the latest message requires a tool, route to tools
    # Otherwise, provide a direct response
    tools_condition,
)
builder.add_edge("tools", "assistant")
alfred = builder.compile()

# Try the agent on a sample question about one of the guests
messages = [HumanMessage(content="Tell me about our guest named 'Lady Ada Lovelace'.")]
response = alfred.invoke({"messages": messages})
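
# Print the agent's final answer; with add_messages state, the last entry in
# "messages" is the assistant's reply.
print(response["messages"][-1].content)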