Spaces:
Paused
Paused
Update app.py
Browse files
app.py
CHANGED
@@ -33,7 +33,7 @@ from langchain.chains.conversation.memory import ConversationBufferWindowMemory
|
|
33 |
from langchain.agents import Tool, initialize_agent
|
34 |
|
35 |
# Build prompt
|
36 |
-
template1 = """You are an expert concierge who is helpful and a renowned guide for Omaha, Nebraska. Based on the current weather conditions, use the following pieces of context,
|
37 |
memory, and message history, along with your knowledge of perennial events in Omaha, Nebraska, to answer the question at the end. If you don't know the answer, just say "Homie, I need to get more data for this," and don't try to make up an answer.
|
38 |
Use fifteen sentences maximum. Keep the answer as detailed as possible. Always include the address, time, date, and
|
39 |
event type and description. Always say "It was my pleasure!" at the end of the answer.
|
@@ -41,11 +41,9 @@ event type and description. Always say "It was my pleasure!" at the end of the a
|
|
41 |
Question: {question}
|
42 |
Helpful Answer:"""
|
43 |
|
44 |
-
template2 = """You are an expert guide
|
45 |
-
|
46 |
-
|
47 |
-
If you don't know the answer, simply say, "Homie, I need to get more data for this," without making up an answer.
|
48 |
-
|
49 |
{context}
|
50 |
Question: {question}
|
51 |
Helpful Answer:"""
|
@@ -101,8 +99,8 @@ def generate_answer(message, choice):
|
|
101 |
elif choice == "Conversational":
|
102 |
agent = initialize_agent_with_prompt(QA_CHAIN_PROMPT_2)
|
103 |
else:
|
104 |
-
logging.error(f"Invalid prompt_choice: {choice}. Defaulting to '
|
105 |
-
agent = initialize_agent_with_prompt(
|
106 |
|
107 |
response = agent(message)
|
108 |
return response['output']
|
@@ -454,7 +452,7 @@ def clear_textbox():
|
|
454 |
return ""
|
455 |
|
456 |
def show_map_if_details(history,choice):
|
457 |
-
|
458 |
return gr.update(visible=True), update_map_with_response(history)
|
459 |
else:
|
460 |
return gr.update(visible=False), ""
|
|
|
33 |
from langchain.agents import Tool, initialize_agent
|
34 |
|
35 |
# Build prompt
|
36 |
+
template1 = """You are an expert concierge who is helpful and a renowned guide for Omaha, Nebraska. Based on the current weather condition as {today weather} and the current date as {17th june 2024}, use the following pieces of context,
|
37 |
memory, and message history, along with your knowledge of perennial events in Omaha, Nebraska, to answer the question at the end. If you don't know the answer, just say "Homie, I need to get more data for this," and don't try to make up an answer.
|
38 |
Use fifteen sentences maximum. Keep the answer as detailed as possible. Always include the address, time, date, and
|
39 |
event type and description. Always say "It was my pleasure!" at the end of the answer.
|
|
|
41 |
Question: {question}
|
42 |
Helpful Answer:"""
|
43 |
|
44 |
+
template2 = """You are an expert concierge who is helpful and a renowned guide for Omaha, Nebraska. Based on the current weather condition as {today weather} and the current date as {17th june 2024}, use the following pieces of context,
|
45 |
+
memory, and message history, along with your knowledge of perennial events in Omaha, Nebraska, to answer the question at the end. If you don't know the answer, just say "Homie, I need to get more data for this," and don't try to make up an answer.
|
46 |
+
Use fifteen sentences maximum. Keep the answer short, sweet, and crisp. Always say "It was my pleasure!" at the end of the answer.
|
|
|
|
|
47 |
{context}
|
48 |
Question: {question}
|
49 |
Helpful Answer:"""
|
|
|
99 |
elif choice == "Conversational":
|
100 |
agent = initialize_agent_with_prompt(QA_CHAIN_PROMPT_2)
|
101 |
else:
|
102 |
+
logging.error(f"Invalid prompt_choice: {choice}. Defaulting to 'Conversational'")
|
103 |
+
agent = initialize_agent_with_prompt(QA_CHAIN_PROMPT_2)
|
104 |
|
105 |
response = agent(message)
|
106 |
return response['output']
|
|
|
452 |
return ""
|
453 |
|
454 |
def show_map_if_details(history,choice):
|
455 |
+
if choice in ["Details", "Conversational"]:
|
456 |
return gr.update(visible=True), update_map_with_response(history)
|
457 |
else:
|
458 |
return gr.update(visible=False), ""
|