File size: 4,417 Bytes
95ca499 1e7a57d 95ca499 c947ea7 5ec7b71 1e7a57d 892745c 1e7a57d c947ea7 1e7a57d c947ea7 1e7a57d c947ea7 892745c 1e7a57d 892745c 1e7a57d 1c73b9c 1e7a57d 741a8ce 1b5e3e9 741a8ce 1e7a57d 741a8ce 1e7a57d 741a8ce 1b5e3e9 741a8ce 1c73b9c 1e7a57d 892745c 1e7a57d 95ca499 c947ea7 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 |
def _answer_from_web(query: str, cached_answer) -> str:
    """
    Fallback path when knowledge-base similarity is too low: run a web search
    and, if a cached answer exists, blend it with the web results via the LLM.

    :param query: The user's question.
    :param cached_answer: Previously cached answer for this query, or a falsy
        value when no cache entry exists.
    :return: Final answer text (not yet stored in the cache).
    """
    logger.info("Similarity score below threshold. Performing web search.")
    web_search_response = manager_agent.run(query)
    logger.debug(f"Web search response: {web_search_response}")
    if cached_answer:
        # Blend the stale cached answer with fresh web results.
        blend_prompt = (
            f"Combine the following previous answer with the new web results to create a more creative and accurate response. "
            f"Do not include any of the previous prompt or instructions in your response. "
            f"Add positivity and conclude with a short inspirational note.\n\n"
            f"Previous Answer:\n{cached_answer}\n\n"
            f"Web Results:\n{web_search_response}"
        )
        return llm._call(blend_prompt).strip()
    # No cache: wrap the raw web response with branding and a disclaimer.
    return (
        f"**Daily Wellness AI**\n\n"
        f"{web_search_response}\n\n"
        "Disclaimer: This information is retrieved from the web and is not a substitute for professional medical advice.\n\n"
        "Wishing you a calm and wonderful day!"
    )


def _answer_from_kb(query: str, retrieved, cached_answer, detail: bool) -> str:
    """
    Finalize an answer from knowledge-base hits, blending with any cached
    answer via the LLM, otherwise expanding the retrieved answers directly.

    :param query: The user's question.
    :param retrieved: List of (answer, score) pairs, sorted by score descending.
    :param cached_answer: Previously cached answer for this query, or falsy.
    :param detail: Whether the user wants a more detailed response.
    :return: Final answer text (not yet stored in the cache).
    """
    responses = [ans for ans, score in retrieved]
    if cached_answer:
        # Blend the cached answer with the newly retrieved KB answers.
        blend_prompt = (
            f"Combine the previous answer with the newly retrieved answers to enhance creativity and accuracy. "
            f"Do not include any of the previous prompt or instructions in your response. "
            f"Add new insights, creativity, and conclude with a short inspirational note.\n\n"
            f"Previous Answer:\n{cached_answer}\n\n"
            f"New Retrieved Answers:\n" + "\n".join(f"- {r}" for r in responses)
        )
        return llm._call(blend_prompt).strip()
    # No cache: proceed with the normal expansion of KB answers.
    return answer_expander.expand(query, responses, detail=detail)


def handle_query(query: str, detail: bool = False) -> str:
    """
    Main function to process the query.

    Routes the question through a relevance check, knowledge-base retrieval,
    and cache lookup, falling back to a web search when the best KB match is
    below the similarity threshold. Any produced answer is stored in the cache.

    :param query: The user's question.
    :param detail: Whether the user wants a more detailed response.
    :return: Response string from Daily Wellness AI.
    """
    # Reject non-strings and empty/whitespace-only input up front.
    if not isinstance(query, str) or not query.strip():
        return "Please provide a valid question."
    try:
        # 1) Sanity Check: Determine if the question is relevant to daily wellness
        if not sanity_checker.is_relevant(query):
            return "Your question seems out of context or not related to daily wellness. Please ask a wellness-related question."

        # 2) Retrieve from the knowledge base and 3) check the cache.
        retrieved = retriever.retrieve(query)
        cached_answer = get_cached_answer(query)

        # 4) No KB results: fall back to the cache, else give up.
        if not retrieved:
            if cached_answer:
                logger.info("No relevant entries found in knowledge base. Returning cached answer.")
                return cached_answer
            return "I'm sorry, I couldn't find an answer to your question."

        # 5) Check the top similarity score (list assumed sorted descending).
        top_score = retrieved[0][1]
        similarity_threshold = 0.3  # Adjust this threshold based on empirical results
        if top_score < similarity_threshold:
            # Low similarity => answer from a web search instead.
            final_answer = _answer_from_web(query, cached_answer)
        else:
            # 6) Sufficient similarity => finalize from the knowledge base.
            final_answer = _answer_from_kb(query, retrieved, cached_answer, detail)

        # 7) Store the new or blended answer in the cache (single exit point
        # replaces the duplicated store-and-return in the original branches).
        store_in_cache(query, final_answer)
        return final_answer
    except Exception as e:
        # Top-level boundary: log with traceback and return a safe message.
        logger.error("Error handling query: %s", e)
        logger.debug("Exception details:", exc_info=True)
        return "An error occurred while processing your request."
|