Update app.py
app.py
CHANGED
@@ -302,115 +302,6 @@ def bot(history, choice, tts_choice, retrieval_mode):
 
     history.append([response, None])  # Ensure the response is added in the correct format
 
-# Langchain imports
-from langchain_openai import ChatOpenAI
-from langchain.agents import tool
-from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
-from langchain_core.messages import AIMessage, HumanMessage
-from langchain.agents.format_scratchpad.openai_tools import (
-    format_to_openai_tool_messages,)
-from langchain.agents.output_parsers.openai_tools import OpenAIToolsAgentOutputParser
-from langchain.agents import AgentExecutor
-from langchain import OpenAI
-
-
-
-# Step 1: Define the restaurant tool
-@tool
-def fetch_restaurant_info(query: str) -> str:
-    """
-    Fetches restaurant-related information from SERP API based on the given query.
-    """
-    from serpapi.google_search import GoogleSearch
-
-    # Define parameters for SERP API
-    params = {
-        "engine": "yelp",
-        "find_desc": query,
-        "find_loc": "Birmingham, AL, USA",
-        "api_key": os.getenv("SERP_API")
-    }
-
-    # Fetch data from SERP API
-    search = GoogleSearch(params)
-    results = search.get_dict()
-    organic_results = results.get("organic_results", [])
-
-    # Prepare the output in plain text
-    if organic_results:
-        response = ""
-        for result in organic_results:
-            name = result.get("title", "No name")
-            rating = result.get("rating", "No rating")
-            reviews = result.get("reviews", "No reviews")
-            phone = result.get("phone", "Not Available")
-            snippet = result.get("snippet", "Not Available")
-            services = result.get("service_options", "Not Known")
-
-            if isinstance(services, list):
-                services = ", ".join(services)
-            elif isinstance(services, dict):
-                services = ", ".join([f"{key}: {value}" for key, value in services.items()])
-
-            link = result.get("link", "#")
-            response += f"Name: {name}\nRating: {rating}\nReviews: {reviews}\nPhone: {phone}\nSnippet: {snippet}\nServices: {services}\nLink: {link}\n\n"
-
-        return response
-    else:
-        return "No restaurant information found."
-
-# Step 2: Integrate the tool with the agent
-tools = [fetch_restaurant_info]
-
-# Define the prompt template
-MEMORY_KEY = "chat_history"
-prompt = ChatPromptTemplate.from_messages(
-    [
-        (
-            "system",
-            "You are a very powerful assistant, but you only respond with restaurant information when asked about restaurants.",
-        ),
-        MessagesPlaceholder(variable_name=MEMORY_KEY),
-        ("user", "{input}"),
-        MessagesPlaceholder(variable_name="agent_scratchpad"),
-    ]
-)
-
-# Define the chat history
-chat_history = []
-
-# Create the agent with the tool
-llm = OpenAI(api_key=os.getenv("OPENAI_API_KEY"), temperature=0, model="gpt-4o")
-agent = (
-    {
-        "input": lambda x: x["input"],
-        "agent_scratchpad": lambda x: format_to_openai_tool_messages(x["intermediate_steps"]),
-        "chat_history": lambda x: x["chat_history"],
-    }
-    | prompt
-    | llm
-    | OpenAIToolsAgentOutputParser(tools=tools)
-)
-
-agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
-
-# Step 3: Update the chatbot function
-def chatbot_response(user_input, history, choice, tts_choice, retrieval_mode):
-    # Check if the user input is related to restaurants
-    if "restaurant" in user_input.lower():
-        result = agent_executor.invoke({"input": user_input, "chat_history": history})
-        history.append([user_input, result["output"]])
-        return history, None
-    else:
-        # Use the existing logic for non-restaurant-related queries
-        response, addresses = generate_answer(user_input, choice, retrieval_mode)
-        history.append([user_input, response])
-        return history, None
-
-
-
-
-
 def add_message(history, message):
     history.append((message, None))
     return history, gr.Textbox(value="", interactive=True, placeholder="Enter message or upload file...", show_label=False)
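
Note on the removed block: it imports ChatOpenAI but builds `llm` from the legacy `langchain.OpenAI` completion wrapper and never binds the tool to the model, then pipes the result into `OpenAIToolsAgentOutputParser`, which parses tool calls from a chat model's response. As written, the agent would most likely never call `fetch_restaurant_info`. If this feature is reinstated, a minimal sketch of the wiring, assuming recent `langchain` and `langchain-openai` releases and with a placeholder tool body standing in for the SerpAPI Yelp call, could look like:

import os

from langchain.agents import AgentExecutor, create_openai_tools_agent, tool
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_openai import ChatOpenAI


@tool
def fetch_restaurant_info(query: str) -> str:
    """Fetch restaurant-related information for the given query."""
    # Placeholder body; the removed code queried the SerpAPI Yelp engine here.
    return f"(restaurant results for {query!r} would go here)"


tools = [fetch_restaurant_info]

# Same model, temperature, and prompt text as the removed block, but on the
# chat API so the model can actually emit tool calls.
llm = ChatOpenAI(api_key=os.getenv("OPENAI_API_KEY"), temperature=0, model="gpt-4o")

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a very powerful assistant, but you only respond with restaurant information when asked about restaurants."),
        MessagesPlaceholder(variable_name="chat_history"),
        ("user", "{input}"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ]
)

# create_openai_tools_agent binds the tools to the model and handles the
# scratchpad formatting and output parsing that the removed code wired by hand.
agent = create_openai_tools_agent(llm, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

# Example call, mirroring the removed chatbot_response routing:
result = agent_executor.invoke({"input": "Any good restaurants nearby?", "chat_history": []})
print(result["output"])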