import sys
import traceback
from datetime import datetime

import ollama

from config import settings

from ..utils import llm
from .ollama_llm import get_data, greetings_function

sys.path.append("..")


class OllamaContextQuery:
    """Three-stage Ollama (llama3.1) pipeline for restaurant order analytics.

    Stages:
      1. ``ollama_context_query`` -- rewrite the raw user query using chat history
         and decide whether the 'get-data' tool is needed.
      2. ``ollama_tool_call``     -- let the model resolve start/end dates and
         dispatch the ``get_data`` tool call.
      3. ``summarised_output``    -- condense the tool response into a short,
         customer-readable answer.
    """

    def __init__(self) -> None:
        # One client shared by every method; host comes from application config.
        self.client = ollama.Client(host=settings.HOST_URI)

    def ollama_context_query(self, chat_history, user_query):
        """Contextualize ``user_query`` against ``chat_history`` via llama3.1.

        Args:
            chat_history: prior user/assistant turns (any printable form).
            user_query: the raw natural-language question.

        Returns:
            tuple[str, bool]: the model's contextualized query, and True when
            the model indicated the 'get-data' tool should run.
        """
        print(user_query)
        prompt = (
            "You are a Contextualization engine for Restaurant Order Analytics. Your job is to rewrite the user's natural language query so it is optimized for tool processing without altering its original meaning utilizing given chat history.\n"
            "IMPORTANT: You have below tool available:\n"
            "- 'get-data': For queries related to restaurant order data, top-selling items, or date-related information.\n"
            "GUIDELINES:\n"
            "- If the query involves any tool utilization, ensure you return tool name in response nothing else.\n"
            "- If the query don't involves any tool utilization, respond naturally and ask to try diffrent question in regards to know about their restaturant analytics and don't mention any tool name nothing else.\n"
            "- Convert any date information to the format YYYY-MM-DD.\n"
            "- If someone asks about you (e.g., 'Who are you?'), respond with: 'I am SAHL analytics bot.'\n"
            "- Include previous chat history between the user and assistant to ensure continuity in the conversation.\n"
            "- Make minimal changes to the query. If no changes are needed, return the query as is.\n"
            "- Ensure that the meaning and context of the query are preserved exactly.\n"
            f"NOTE: The current date and time is: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
            f"PREVIOUS CHAT HISTORY of a USER : {chat_history}"
            "Use above chat history to generate context query, don't include in response."
        )
        system_msg = {"role": "system", "content": prompt}
        user_msg = {"role": "user", "content": user_query}
        response = self.client.chat(
            model="llama3.1",
            messages=[system_msg, user_msg],
        )
        context_query = response["message"]["content"]
        print("CONTEXT .... ")
        print(context_query)
        # Per the prompt, the model answers with the tool name when a tool is needed.
        if "get-data" in context_query:
            return context_query, True
        return context_query, False

    def ollama_tool_call(self, context_query):
        """Ask the model to pick start/end dates and (optionally) call get_data.

        Args:
            context_query: output of ``ollama_context_query``.

        Returns:
            tuple[str | Any, bool]: (answer or tool response, greeting flag).
            Every path now returns a 2-tuple so callers can unpack safely.
        """
        print(context_query)
        try:
            prompt = (
                "You are a Tool Calling engine. Assume queries are related to Restaurant Order Analytics.\n"
                f'Note that the current date and time is: {datetime.now().strftime("%Y-%m-%d")}\n'
                "if There is greetings or anything like that give responce according to that like in natural language chat according to it.\n"
                "IMPORTANT RULES:\n"
                "- If the query is about 'today,' the start date should be today's date, and the end date should be tomorrow's date.\n"
                "- If the query is about 'yesterday,' the start date should be the date of the day before yesterday, and the end date should be yesterday's date.\n"
                "- If the query specifies a particular date, set the start date to that date and the end date to the day after that date (e.g., for 2024-07-23, start date: 2024-07-23, end date: 2024-07-24).\n"
                "- For 'last week,' set the start date to 7 days before today, and the end date will be today's date.\n"
                "- For 'last month,' set the start date to the same day of the previous month (or the 1st of the previous month if today is the 1st), and the end date to today's date.\n"
                "-STRICTLY:(IMPORTANT) For 'last X days' (e.g., last 5 days), set the start date to X days before today, and the end date to today's date.\n"
                "- Always ensure dates are in the yyyy-mm-dd format.\n"
                "STRICTLY:\n"
                "- For any range, the end date must be the day after the last date in the range. For example, for the range 2024-07-20 to 2024-07-23, the start date is 2024-07-20, and the end date is 2024-07-24.\n"
                "IMPORTANT: You need to take the same as the Contextual query you have been provided cause it is already optimized, no need to change the query. Your main work is to choose the start date and end date."
                "IMPORTANT : Few Examples : Lets Say Today's date is 2024-10-09"
                "EXAMPLES:\n"
                "Example 1: Query about today\n"
                'User Query: "What are the total orders for today?"\n'
                'Model Response: "For today, the start date is 2024-10-09, and the end date is 2024-10-10."\n\n'
                "Example 2: Query about yesterday\n"
                'User Query: "What was the revenue yesterday? or "\n'
                'Model Response: "For yesterday, the start date is 2024-10-08, and the end date is 2024-10-09."\n\n'
                "Example 3: Query about a specific date\n"
                'User Query: "Show me the orders for 2024-07-23."\n'
                'Model Response: "For the specific date 2024-07-23, the start date is 2024-07-23, and the end date is 2024-07-24."\n\n'
                "Example 4: Query about the last week\n"
                'User Query: "What were the best-selling items last week?"\n'
                'Model Response: "For last week, the start date is 2024-10-03, and the end date is 2024-10-09."\n\n'
                "Example 5: Query about the last month\n"
                'User Query: "What was the total revenue for last month?"\n'
                'Model Response: "For last month, the start date is 2024-09-01, and the end date is 2024-10-01."\n\n'
                "Example 6: Query about the last X days\n"
                'User Query: "what is revenue last 7 days."\n'
                'Model Response: "For the last 7 days, the start date is 2024-10-03, and the end date is 2024-10-09."\n\n'
                'User Query: "what is revenue last 2 days."\n'
                'Model Response: "For the last 2 days, the start date is 2024-10-07, and the end date is 2024-10-09."\n\n'
                'User Query: "what is revenue last 5 days."\n'
                'Model Response: "For the last 5 days, the start date is 2024-10-04, and the end date is 2024-10-09."\n\n'
                "MOST_IMPORTANT : Query about the last X days : take the start date , the x days before todays's date and the end date should be today's Date , as simple as that no need to overcook anything"
                f"IMPORTANT : Note that the Today's date : {datetime.now().strftime('%Y-%m-%d')}\n"
                "you have today's date take the start and end date according ly through the examples."
            )
            messages = [{"role": "system", "content": prompt}]
            messages.append({"role": "user", "content": context_query})
            response = self.client.chat(
                model="llama3.1",
                messages=messages,
                tools=[
                    {
                        "type": "function",
                        "function": {
                            "name": "get_data",
                            "description": "Get Restaurant Order Information with detailed with natural language query",
                            "parameters": {
                                "type": "object",
                                "properties": {
                                    "query": {
                                        "type": "string",
                                        "description": "natural language query string to be processed",
                                    },
                                    "start_date": {
                                        "type": "string",
                                        "description": "Start date in YYYY-MM-DD format",
                                    },
                                    "end_date": {
                                        "type": "string",
                                        "description": "End date in YYYY-MM-DD format",
                                    },
                                },
                                "required": ["query"],
                            },
                        },
                    }
                ],
            )
            messages.append(response["message"])
            tool_calls = response["message"].get("tool_calls", [])
            if not tool_calls:
                # Model answered directly without requesting any tool.
                return response["message"]["content"], False
            tool_name = tool_calls[0].get("function").get("name")
            if tool_name == "get_data":
                return self.handle_get_data(tool_calls)
            # BUG FIX: this branch previously returned a bare string while every
            # other path returns (text, flag); keep the tuple contract uniform.
            return response["message"]["content"], False
        except Exception as e:
            # Best-effort recovery, but log the full stack trace so failures
            # are diagnosable (traceback was imported yet never used before).
            print(f"Error: {e}")
            traceback.print_exc()
            # BUG FIX: error path also returned a bare string; keep the tuple.
            return "There was an error processing the query.", False

    def handle_greetings(self, response):
        """Run ``greetings_function`` on the model-proposed greeting tool call.

        Returns:
            tuple[str, bool]: (greeting text, True) -- True marks a greeting.
        """
        tool_args = (
            response["message"]["tool_calls"][0].get("function").get("arguments")
        )
        greeting_message = greetings_function(tool_args["query"])
        print("**** GREETING MESSAGE ****")
        print(greeting_message)
        print("**************************")
        return greeting_message, True

    def handle_get_data(self, tool_calls):
        """Execute the first ``get_data`` tool call and return its response.

        Note: intentionally returns on the first iteration, matching the
        original behavior of honoring only the first tool call. Returns None
        implicitly if ``tool_calls`` is empty (callers guard against this).

        Returns:
            tuple[Any, bool]: (tool response, False) -- False = not a greeting.
        """
        available_functions = {"get_data": get_data}
        for tool in tool_calls:
            function_to_call = available_functions[tool["function"]["name"]]
            function_args = tool["function"]["arguments"]
            function_response = function_to_call(**function_args)
            print(
                f"**** DATA RESPONSE ****\n{function_response}\n***********************"
            )
            return function_response, False

    def summarised_output(self, messages, chat_history, context_query, user_query):
        """Condense the tool response into a short customer-facing answer.

        Args:
            messages: the tool response produced by ``ollama_tool_call``.
            chat_history: prior conversation turns (for continuity only).
            context_query: the contextualized query from stage 1.
            user_query: the original user question.

        Returns:
            str: the model's simplified, customer-readable summary.
        """
        prompt = (
            "You are a Summarised responses engine. Assume queries are related to Restaurant Order Analytics.\n"
            "Your task is to provide relevant information about user query, in prompt you will get all the information about user's chat history and all the information provided by the tool calling expert.\n"
            "Use only provided information Don't use any extra information from your end.\n"
            f'Note that the current date and time is: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}\n'
            "Merge the sentence in simplified way , dont over cook anything just a basic merger"
            "always show the numbers if provided."
            "Never Compare."
            "Merge the sentence in a simplified way, without adding unnecessary context."
            "if the answer is too long , without junk than provide the full answer in simplified way , and provide in the list"
            "Remove any phrases like 'Based on the JSON schema...' or 'Here is the relevant information in simplified form."
            "Provide the final result directly, formatted for customer readability."
            "If data not found , dont use json data word , just decline that perticular question. there is none"
            "Ignore unnecessary lines which does not need to show in answer , cause we need to show this to customer so just give the relevent information, no need to use any extra lines EXAMPLE:{top seeling item is this with order count of this.} This is how a simplified version looks like"
            f"PREVIOUS CHAT HISTORY: {chat_history} , never include this in response"
            f"If the question is new than no need to consider {chat_history}"
            f"Current Tool responce: {messages}"
            f"Contextualization query: {context_query}"
            "IMPORTANT : Here i am providing user_query so you can answer according to that."
            "Example 1 :"
            "user_query : What is the last day's revenue?"
            "expected_response"
            "Last day's revenue is $1,200."
            "Example 2 :"
            "user_query : What was the top-selling item yesterday?"
            "expected_response"
            "The top-selling item yesterday was Margherita Pizza with 120 orders."
            "Example 3 :"
            "user_query : How many orders were placed today?"
            "expected_response"
            "There were 350 orders placed today."
            "Exaple 4 :"
            "user_query : What is the revenue for the last 7 days?"
            "expected_response"
            "Revenue for the last 7 days is $8,500."
            "use this user queary to generate response according to the user query , if last day asked must use last day same for week , last 7 days , last 15 days , what ever asked just use that and give answer after : this"
            f"user_query : {user_query}"
            "STRICT : Don't use 'However, I didn't use any tool this time or Try a different question regarding restaurant analytics.' this type of any suggetions or anything"
        )
        print("user_query : ", user_query)
        print("messages : ", messages)
        system_msg = {"role": "system", "content": prompt}
        user_msg = {"role": "user", "content": user_query}
        response = self.client.chat(
            model="llama3.1",
            messages=[system_msg, user_msg],
        )
        # Use a fresh name instead of shadowing the context_query parameter.
        summary = response["message"]["content"]
        return summary