Update app.py
app.py
CHANGED
@@ -247,16 +247,65 @@ _search_query = RunnableBranch(
 # Use natural language and be concise.
 # Answer:"""
 
-
-
+# Define conversational and detailed prompt templates for Neo4j responses
+neo4j_conversational_template = f"""As an expert concierge known for being helpful and a renowned guide for Birmingham, Alabama, I assist visitors in discovering the best that the city has to offer. Given today's sunny and bright weather on {current_date}, I am well-equipped to provide valuable insights and recommendations without revealing specific locations. I draw upon my extensive knowledge of the area, including perennial events and historical context.
 In light of this, how can I assist you today? Feel free to ask any questions or seek recommendations for your day in Birmingham. If there's anything specific you'd like to know or experience, please share, and I'll be glad to help. Remember, keep the question concise for a quick and accurate response.
 "It was my pleasure!"
 {{context}}
 Question: {{question}}
 Helpful Answer:"""
 
+neo4j_details_template = f"""As an expert concierge in Birmingham, Alabama, known for being a helpful and renowned guide, I am here to assist you on this sunny bright day of {current_date}. Given the current weather conditions and date, I have access to a plethora of information regarding events, places, and activities in Birmingham that can enhance your experience.
+If you have any questions or need recommendations, feel free to ask. I have a wealth of knowledge of perennial events in Birmingham and can provide detailed information to ensure you make the most of your time here. Remember, I am here to assist you in any way possible.
+Now, let me guide you through some of the exciting events happening today in Birmingham, Alabama:
+Address: >>, Birmingham, AL
+Time: >>__
+Date: >>__
+Description: >>__
+Address: >>, Birmingham, AL
+Time: >>__
+Date: >>__
+Description: >>__
+Address: >>, Birmingham, AL
+Time: >>__
+Date: >>__
+Description: >>__
+Address: >>, Birmingham, AL
+Time: >>__
+Date: >>__
+Description: >>__
+Address: >>, Birmingham, AL
+Time: >>__
+Date: >>__
+Description: >>__
+If you have any specific preferences or questions about these events or any other inquiries, please feel free to ask. Remember, I am here to ensure you have a memorable and enjoyable experience in Birmingham, AL.
+It was my pleasure!
+{{context}}
+Question: {{question}}
+Helpful Answer:"""
 
-
+# Create prompt templates
+QA_CHAIN_PROMPT_NEO4J_CONVERSATIONAL = PromptTemplate(input_variables=["context", "question"], template=neo4j_conversational_template)
+QA_CHAIN_PROMPT_NEO4J_DETAILS = PromptTemplate(input_variables=["context", "question"], template=neo4j_details_template)
+
+# Define Neo4j retrieval chain for conversational mode
+def neo4j_retrieval_conversational(question: str):
+    structured_data = structured_retriever(question)
+    logging.debug(f"Structured data (Conversational): {structured_data}")
+    prompt = QA_CHAIN_PROMPT_NEO4J_CONVERSATIONAL.format(context=structured_data, question=question)
+    response = chat_model({"query": prompt})
+    return response, []
+
+# Define Neo4j retrieval chain for detailed mode
+def neo4j_retrieval_details(question: str):
+    structured_data = structured_retriever(question)
+    logging.debug(f"Structured data (Details): {structured_data}")
+    prompt = QA_CHAIN_PROMPT_NEO4J_DETAILS.format(context=structured_data, question=question)
+    response = chat_model({"query": prompt})
+    return response, extract_addresses(response)
+
+
+# qa_prompt = ChatPromptTemplate.from_template(template)
 
 chain_neo4j = (
     RunnableParallel(
@@ -271,6 +320,28 @@ chain_neo4j = (
 )
 
 # Define a function to select between Pinecone and Neo4j
+# def generate_answer(message, choice, retrieval_mode):
+#     logging.debug(f"generate_answer called with choice: {choice} and retrieval_mode: {retrieval_mode}")
+
+#     prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
+
+#     if retrieval_mode == "Vector":
+#         qa_chain = RetrievalQA.from_chain_type(
+#             llm=chat_model,
+#             chain_type="stuff",
+#             retriever=retriever,
+#             chain_type_kwargs={"prompt": prompt_template}
+#         )
+#         response = qa_chain({"query": message})
+#         logging.debug(f"Vector response: {response}")
+#         return response['result'], extract_addresses(response['result'])
+#     elif retrieval_mode == "Knowledge-Graph":
+#         response = chain_neo4j.invoke({"question": message})
+#         logging.debug(f"Knowledge-Graph response: {response}")
+#         return response, extract_addresses(response)
+#     else:
+#         return "Invalid retrieval mode selected.", []
+
 def generate_answer(message, choice, retrieval_mode):
     logging.debug(f"generate_answer called with choice: {choice} and retrieval_mode: {retrieval_mode}")
 
@@ -287,12 +358,16 @@ def generate_answer(message, choice, retrieval_mode):
         logging.debug(f"Vector response: {response}")
         return response['result'], extract_addresses(response['result'])
     elif retrieval_mode == "Knowledge-Graph":
-        response = chain_neo4j.invoke({"question": message})
+        if choice == "Details":
+            response, addresses = neo4j_retrieval_details(message)
+        else:
+            response, addresses = neo4j_retrieval_conversational(message)
         logging.debug(f"Knowledge-Graph response: {response}")
-        return response, extract_addresses(response)
+        return response, addresses
     else:
         return "Invalid retrieval mode selected.", []
 
+
 def bot(history, choice, tts_choice, retrieval_mode):
     if not history:
         return history
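
Note: both new Neo4j templates are f-strings, so the doubled braces ({{context}}, {{question}}) survive f-string evaluation as single braces and become the PromptTemplate input variables that the two helper functions later fill via .format(). A minimal, self-contained sketch of that formatting step, using a shortened stand-in template and made-up context/question values (illustrative only, not part of the commit):

from langchain.prompts import PromptTemplate

# Shortened stand-in for neo4j_conversational_template; only the variable slots matter here.
demo_template = """You are a Birmingham concierge.
{context}
Question: {question}
Helpful Answer:"""

demo_prompt = PromptTemplate(input_variables=["context", "question"], template=demo_template)

# In app.py the context comes from structured_retriever(); this string is a stand-in.
prompt_text = demo_prompt.format(
    context="Sloss Furnaces hosts a night market on Saturdays.",
    question="What can I do near downtown this weekend?",
)
print(prompt_text)  # the fully rendered prompt that would be sent to the chat model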
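
With this change, the Knowledge-Graph branch of generate_answer routes to the detailed or conversational Neo4j helper based on the choice argument, and only the detailed path runs extract_addresses. A rough usage sketch (the question text is made up, and it assumes chat_model, structured_retriever, and extract_addresses are already initialised elsewhere in app.py, as the diff implies):

# Illustrative call into the updated routing (not part of the commit).
answer, addresses = generate_answer(
    "What events are happening in Birmingham today?",  # sample question (assumed)
    "Details",            # "Details" -> neo4j_retrieval_details; anything else -> conversational
    "Knowledge-Graph",    # retrieval_mode; "Vector" still uses the RetrievalQA path
)
print(answer)
print(addresses)  # extracted addresses for "Details", an empty list for the conversational path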