Viraj2307 committed on
Commit
360b354
·
1 Parent(s): a80c2ed

Initial Commit

Browse files
Files changed (43) hide show
  1. Dockerfile +13 -0
  2. Makefile +5 -0
  3. __pycache__/config.cpython-312.pyc +0 -0
  4. app.py +158 -0
  5. config.py +17 -0
  6. notebooks/Main_Ollama_Notebook.ipynb +0 -0
  7. notebooks/ollama_tools_check_main.py +328 -0
  8. notebooks/sahl_daywise_analysis_order_count.ipynb +975 -0
  9. notebooks/store_daywise_reveneu_main.ipynb +0 -0
  10. src/__init__.py +0 -0
  11. src/__pycache__/__init__.cpython-312.pyc +0 -0
  12. src/__pycache__/utils.cpython-312.pyc +0 -0
  13. src/json_creation/__init__.py +5 -0
  14. src/json_creation/__pycache__/__init__.cpython-312.pyc +0 -0
  15. src/json_creation/__pycache__/aggregators.cpython-312.pyc +0 -0
  16. src/json_creation/__pycache__/app.cpython-312.pyc +0 -0
  17. src/json_creation/__pycache__/archive_orders.cpython-312.pyc +0 -0
  18. src/json_creation/__pycache__/items_of_orders_timestamp.cpython-312.pyc +0 -0
  19. src/json_creation/__pycache__/json_schema.cpython-312.pyc +0 -0
  20. src/json_creation/__pycache__/mongo_integration.cpython-312.pyc +0 -0
  21. src/json_creation/__pycache__/ollama_llm.cpython-312.pyc +0 -0
  22. src/json_creation/__pycache__/ollama_tool_calling.cpython-312.pyc +0 -0
  23. src/json_creation/__pycache__/order_states_timestamp.cpython-312.pyc +0 -0
  24. src/json_creation/__pycache__/orders.cpython-312.pyc +0 -0
  25. src/json_creation/__pycache__/revenue.cpython-312.pyc +0 -0
  26. src/json_creation/aggregators.py +28 -0
  27. src/json_creation/app.py +70 -0
  28. src/json_creation/archive_orders.py +83 -0
  29. src/json_creation/items_of_orders_timestamp.py +28 -0
  30. src/json_creation/json_schema.py +69 -0
  31. src/json_creation/mongo_integration.py +84 -0
  32. src/json_creation/ollama_llm.py +164 -0
  33. src/json_creation/ollama_tool_calling.py +246 -0
  34. src/json_creation/order_states_timestamp.py +41 -0
  35. src/json_creation/orders.py +16 -0
  36. src/json_creation/revenue.py +22 -0
  37. src/utils.py +19 -0
  38. src/whatsapp_integration/__init__.py +2 -0
  39. src/whatsapp_integration/__pycache__/__init__.cpython-312.pyc +0 -0
  40. src/whatsapp_integration/__pycache__/gathering_data.cpython-312.pyc +0 -0
  41. src/whatsapp_integration/__pycache__/sending_message.cpython-312.pyc +0 -0
  42. src/whatsapp_integration/gathering_data.py +21 -0
  43. src/whatsapp_integration/sending_message.py +23 -0
Dockerfile ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Container image for the FastAPI analytics service.
FROM python:3.9

# Run as an unprivileged user (uid 1000); user-level pip installs land on PATH below.
RUN useradd -m -u 1000 user
USER user
ENV PATH="/home/user/.local/bin:$PATH"

WORKDIR /app

# Install dependencies first so this layer stays cached until requirements.txt changes.
COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Copy the application source and start the API server.
COPY --chown=user . /app
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"]
Makefile ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
# Install Python dependencies.
install:
	pip install -r requirements.txt

# Run the API locally (mirrors the Dockerfile CMD).
# Fixed: the original ran `python3 src/main.py`, but no src/main.py exists in
# this repository — the service entry point is app.py.
up:
	uvicorn app:app --host 0.0.0.0 --port 8000
__pycache__/config.cpython-312.pyc ADDED
Binary file (1.38 kB). View file
 
app.py ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI, HTTPException, Request, BackgroundTasks
2
+ from starlette.responses import RedirectResponse
3
+ from src.json_creation import (
4
+ final_counts,
5
+ fetch_response,
6
+ OllamaContextQuery,
7
+ ChatManager,
8
+ )
9
+ from src.whatsapp_integration import gathering_data, normal_message
10
+ from datetime import datetime
11
+ from typing import Optional
12
+ import logging
13
+ from pydantic import BaseModel
14
+ from config import settings
# WhatsApp Business API credentials, resolved via config.Settings
# (which reads the environment / .env file).
# PHONE_NUMBER_ID = os.environ.get("PHONE_NUMBER_ID")
# BEARER_TOKEN = os.environ.get("BEARER_TOKEN")
PHONE_NUMBER_ID = settings.PHONE_NUMBER_ID
BEARER_TOKEN = settings.BEARER_TOKEN

# Root logger: timestamped INFO-level output for the whole service.
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)

# FastAPI application and the process-wide chat-session manager.
app = FastAPI()
chat_manager = ChatManager()
class UserQueryInput(BaseModel):
    """Request body for /process_query: one user message tied to a phone number."""

    # WhatsApp phone number identifying the chat session.
    phone_number: str
    # The user's natural-language question.
    user_query: str
33
+
@app.get("/")
def read_root():
    """Redirect the bare root URL to the interactive Swagger docs."""
    return RedirectResponse("/docs")
37
+
38
+
@app.get("/fetch_data")
def fetch_data(
    store_id: str,
    brand_id: str,
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
):
    """Fetch and aggregate order data for one store/brand.

    ``start_date``/``end_date`` must both be given in YYYY-MM-DD format;
    when either is omitted, ``None`` is forwarded so the backend applies
    its default date range.
    """
    # Bind both names up front: the original code raised an
    # UnboundLocalError at the fetch_response call whenever the optional
    # dates were not supplied.
    start_date_obj = None
    end_date_obj = None
    if start_date and end_date:
        try:
            start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
            end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
        except ValueError:
            return {"error": "Dates must be in YYYY-MM-DD format."}

    data = fetch_response(store_id, brand_id, None, start_date_obj, end_date_obj)

    if data:
        return final_counts(data)
    return {"error": "Failed to fetch data"}
60
+
@app.post("/process_message")
def process_message(phone_number: str, user_query: str):
    """Record an incoming message and return the formatted chat history."""
    history = chat_manager.handle_message(phone_number, user_query)
    return {"chat_history": history}
65
+
@app.post("/process_query")
def process_query(input_data: UserQueryInput):
    """Run the full LLM pipeline for one user query.

    Steps: contextualize the query against the chat history; if the
    contextualizer asks for a tool call, invoke the Ollama tool and
    summarise its output; persist the assistant reply in the session.

    Returns the final answer string; raises HTTPException(500) on any
    pipeline failure.
    """
    user_query = input_data.user_query
    ollama_class = OllamaContextQuery()
    chat_history = chat_manager.handle_message(input_data.phone_number, user_query)
    try:
        context_query, is_tool_invoke = ollama_class.ollama_context_query(
            chat_history, user_query
        )
        # Default answer is the contextualized query itself; replaced below
        # when a tool call is required.
        final_output = context_query
        if is_tool_invoke:
            main_llm_response = ollama_class.ollama_tool_call(user_query)
            final_output = ollama_class.summarised_output(
                messages=main_llm_response,
                chat_history=chat_history,
                context_query=context_query,
                user_query=user_query,
            )
        # Persist the assistant's reply so follow-up questions have context.
        session = chat_manager.get_or_create_session(input_data.phone_number)
        chat_manager.save_message(
            session.id, input_data.phone_number, "ASSISTANT", final_output
        )
        return final_output
    except Exception as e:
        # Replaced the bare print()s with structured logging (traceback included).
        logging.exception("process_query failed for %s", input_data.phone_number)
        raise HTTPException(status_code=500, detail=str(e))
104
+
@app.get("/receive_msg", tags=["Whatsapp Webhook"])
async def whatsapp_webhook_get(request: Request):
    """Meta/WhatsApp webhook verification handshake.

    Echoes back ``hub.challenge`` (as an int) when the verify token matches;
    otherwise responds 400.
    """
    # Extract query parameters from the request
    query_params = request.query_params
    hub_mode = query_params.get("hub.mode")
    hub_challenge = query_params.get("hub.challenge")
    hub_verify_token = query_params.get("hub.verify_token")

    if (
        hub_mode == "subscribe"
        and hub_challenge
        and hub_verify_token == "Sahl-analytics-bot"
    ):
        return int(hub_challenge)
    # Fixed: the original returned a (dict, 400) tuple, which FastAPI
    # serialises as a JSON array with HTTP status 200 — Meta would treat
    # verification as successful. Raising gives a real 400 response.
    raise HTTPException(
        status_code=400, detail="Invalid request or verification token mismatch"
    )
122
+
123
+
@app.post("/receive_msg", tags=["Whatsapp Webhook"])
async def whatsapp_webhook(request: Request, background_tasks: BackgroundTasks):
    """Accept a WhatsApp webhook event and hand it off for background processing."""
    payload = await request.json()
    # Respond immediately; the LLM pipeline runs after the response is sent.
    background_tasks.add_task(call_query, payload)
    return "200 OK HTTPS."
129
+
def call_query(data):
    """Background handler for one WhatsApp webhook payload.

    Ignores events addressed to other phone-number ids and malformed
    payloads (e.g. delivery-status events without a message body); never
    raises into the background-task runner.
    """
    try:
        metadata_phone_id = data["entry"][0]["changes"][0]["value"]["metadata"][
            "phone_number_id"
        ]
    except (KeyError, IndexError, TypeError):
        # Fixed: was a bare `except BaseException: pass`, which also
        # swallowed KeyboardInterrupt/SystemExit and hid every bug.
        # Payloads without the expected envelope are simply not for us.
        logging.debug("Webhook payload without message metadata ignored")
        return
    if metadata_phone_id != PHONE_NUMBER_ID:
        return

    logging.info("Incoming WhatsApp payload: %s", data)
    try:
        phone_number, text = gathering_data(data)
        logging.info("From %s: %s", phone_number, text)
        sample_input = UserQueryInput(phone_number=phone_number, user_query=text)
        res = process_query(sample_input)
        normal_message(res, phone_number)
    except Exception:
        # Best-effort: log with traceback instead of printing, but keep the
        # original contract that webhook handling never propagates errors.
        logging.exception("Exception in post whatsapp request")
153
+
154
+
if __name__ == "__main__":
    # Local development entry point; in Docker the server is started by CMD.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)
config.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic_settings import BaseSettings, SettingsConfigDict
2
+ import os
3
+ from dotenv import load_dotenv
4
+ load_dotenv()
5
+
class Settings(BaseSettings):
    """Application configuration, resolved from the environment / .env file.

    pydantic-settings itself reads the environment (and ``.env``) for each
    field name, so the ``os.environ.get`` defaults below are only fallbacks.
    """

    # WhatsApp Business phone-number id; overridable via the environment.
    PHONE_NUMBER_ID: str = "196911033511915"
    # Fixed: str(os.environ.get("BEARER_TOKEN")) turned a missing variable
    # into the literal string "None"; an empty default keeps absence detectable.
    BEARER_TOKEN: str = os.environ.get("BEARER_TOKEN", "")
    MONGODB_URI: str = os.environ.get("MONGODB_URI", "FAKE")
    DB_NAME: str = os.environ.get("DB_NAME", "FAKE")
    HOST_URI: str = os.environ.get("HOST_URI", "FAKE")
    MODEL_NAME: str = os.environ.get("MODEL_NAME", "FAKE")
    DEV_URI: str = os.environ.get("DEV_URI", "FAKE")
    model_config = SettingsConfigDict(env_file=".env", extra="allow")


# Singleton imported throughout the app (``from config import settings``).
settings = Settings()
notebooks/Main_Ollama_Notebook.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
notebooks/ollama_tools_check_main.py ADDED
@@ -0,0 +1,328 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import requests
3
+ from llama_index.core.agent import ReActAgent
4
+ from llama_index.core.indices.struct_store import JSONQueryEngine
5
+ from llama_index.core.tools import BaseTool, FunctionTool
6
+ from llama_index.core.llms import ChatMessage
7
+ from llama_index.core import PromptTemplate
8
+
9
+
# JSON Schema describing the aggregated restaurant-order payload served by
# the /fetch_data endpoint. It is embedded verbatim into the LLM prompt in
# get_data() below so the model knows the shape of the data it is querying.
json_schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "RestaurantOrderData",
    "type": "object",
    "properties": {
        "restaurant_name": {
            "type": "string",
            "description": "Name of the restaurant"
        },
        "number_of_times_item_ordered": {
            "type": "object",
            "description": "Number of times each item has been ordered. Each key is item name and value is number of times it get ordered",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of times the item was ordered"
            }
        },
        "number_of_order_daywise": {
            "type": "object",
            "description": "Number of orders received each day. Each key is date and value is number of orders",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of orders for the specific day"
            }
        },
        "number_of_order_canceled": {
            "type": "object",
            "description": "Number of orders received each day. Each key is date and value is number of orders get canceled",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of canceled orders for the specific day"
            }
        },
        "number_of_order_completed": {
            "type": "object",
            "description": "Number of orders received each day. Each key is date and value is number of orders get completed",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of completed orders for the specific day"
            }
        },
        "number_of_order_aggregator_wise": {
            "type": "object",
            "description": "Number of orders received from each aggregator. Each key is aggregator name and value is number of orders get on that specific aggregator",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of orders for the specific aggregator"
            }
        },
        "total_revenue": {
            "type": "number",
            "description": "Total revenue generated"
        },
        "total_orders": {
            "type": "integer",
            "description": "Total number of orders"
        },
        "revenue_daywise": {
            "type": "object",
            "description": "Revenue generated each day. Each key is date and value is total revenue generated on that specific date",
            "additionalProperties": {
                "type": "number",
                "description": "Revenue for the specific day"
            }
        }
    },
    "required": [
        "restaurant_name",
        "number_of_times_item_ordered",
        "number_of_order_daywise",
        "number_of_order_canceled",
        "number_of_order_completed",
        "number_of_order_aggregator_wise",
        "total_revenue",
        "total_orders",
        "revenue_daywise"
    ]
}
88
+
def get_data(query: str, start_date: str, end_date: str) -> str:
    """Fetch aggregated order data for [start_date, end_date) and answer *query* over it.

    NOTE(review): relies on a notebook-global ``llm`` (used inside the nested
    helper) that is not defined in this file — confirm it exists before
    calling this standalone.
    """
    print(f"QUERY :: {query}")
    print(f"START DATE :: {start_date}")
    print(f"END DATE :: {end_date}")

    # from datetime import datetime, timedelta
    # today_date = datetime.today()
    # yesterday_date = today_date - timedelta(days=1)
    # end_date = datetime.today().strftime('%Y-%m-%d')
    # start_date = yesterday_date.strftime('%Y-%m-%d')

    # NOTE(review): hard-coded ngrok tunnel plus store/brand ids — this URL is
    # ephemeral and stops working whenever the tunnel restarts.
    url = f"https://a03a-106-201-234-104.ngrok-free.app/fetch_data?store_id=634fdb58ad4c218c52bfaf4f&brand_id=6347b5f0851f703b75b39ad0&start_date={start_date}&end_date={end_date}"

    payload = {}
    headers = {
        'accept': 'application/json'
    }

    # GET the aggregated JSON produced by the /fetch_data endpoint.
    response = requests.request("GET", url, headers=headers, data=payload)
    a = response.json()
    print(a)

    def create_dynamic_prompt(query):
        """Create a dynamic prompt."""
        # ``a`` (the fetched JSON, via closure) and ``json_schema`` are
        # embedded verbatim into the system prompt for the analysis LLM.
        prompt = f"""
        The following is a task for an intelligent assistant:

        Here is the JSON with order details of a restaurant named "Wrap and Dip Cafe":
        {json.dumps(a, indent=2)}

        Given the JSON schema for reference:
        {json.dumps(json_schema, indent=2)}

        You are a JSON analysis engine designed to answer questions based on the given restaurant order data. The data includes various aspects such as the number of times each item has been ordered, the number of orders per day, cancellations, completions, orders by aggregator, total revenue, total orders, and daily revenue.

        When asked a question, follow these steps:

        1. Understand the question and identify the relevant parts of the JSON data.
        2. Extract the necessary information from the JSON data.
        3. Perform any required calculations or comparisons.
        4. Provide a concise and accurate answer without including unnecessary details.
        5. If you encounter more than one answer, provide them in a list.
        6. Provide accurate, concise, and clear answers based on the JSON data provided.
        7. I only want the response to be printed after 'So,the answer is' or 'Therefore,the answer is' and not the based on json data line.

        Special attention should be given to queries about items ordered the most. These queries require looking into "number_of_times_item_ordered" and identifying the item(s) with the highest count.

        Here are a few examples of questions you should be able to answer:

        - "Which item is ordered the most?"
        - Look into "number_of_times_item_ordered" and find the item with the highest count.

        - "On which date was the highest revenue collected?"
        - Look into "revenue_daywise" and find the date with the highest revenue.

        - "How many orders were completed on 2024-04-22?"
        - Look into "number_of_order_completed" for the value corresponding to "2024-04-22".

        - "What is the total revenue generated?"
        - Return the value from "total_revenue".

        - "How many orders were canceled on 2024-03-13?"
        - Look into "number_of_order_canceled" for the value corresponding to "2024-03-13".

        - "Find the item with exactly 3 orders."
        - Look into "number_of_times_item_ordered" and find the item(s) with a count of 3.

        Use these examples to guide your responses to similar questions. If you encounter a new type of question, use the structure and examples to determine how to extract and compute the answer.

        Remember, your goal is to provide accurate, concise, and clear answers based on the JSON data provided. Do not generate lengthy responses or include detailed breakdowns unless explicitly asked. Return only the direct answer to the question.

        The user's query is as follows: "{query}"
        """
        messages = [
            ChatMessage(role="system", content=prompt),
            ChatMessage(role="user", content=query),
        ]
        # NOTE(review): ``llm`` is not defined in this module — presumably a
        # llama_index LLM created elsewhere in the notebook; verify.
        resp = llm.chat(messages)
        return resp.message.content

    response = create_dynamic_prompt(query)
    print(response)
    return response
176
+
from datetime import datetime
# def get_formatted_conv_str(conv_history: list):
#     formatted_turns = []
#     for turn in conv_history:
#         user_query = turn["HUMAN"]
#         system_query = turn["ASSISTANT"]

#         # fitler out links and unnecessary info
#         system_query = re.sub(r"\[(.*?)\].*?\(.*?\)", r"\1", system_query)

#         max_words = 100
#         if len(system_query.split()) > max_words:
#             system_query = " ".join(system_query.split()[:max_words]) + "..."

#         formatted_turns += [f"User: {user_query}" + f"\nSystem: {system_query}"]

#     formatted_conversation = "\n\n".join(formatted_turns)
#     return formatted_conversation

from datetime import datetime

# Step 1 — contextualization: rewrite the raw user query (read from stdin)
# into a standalone query with any dates normalised to YYYY-MM-DD.
# NOTE(review): ``client`` is not defined in this file — presumably an Ollama
# chat client created in the notebook this script was exported from; confirm
# before running standalone.
prompt = (
    "You are a Contextualization engine. Assume queries are related to Restaurant Order Analytics."
    # "The following is a conversation between a user (User) and a Order Analytics Assistant (System): \n\n"
    # f"{str_chat_history}"
    "Rewrite the current user natural language query, to be a standalone natural language query that resolves all ambiguities.\n"
    "Do this using common sense and your best judgement, resolving ambiguities by incorporating all relevant information from the conversation history.\n"
    "Make minimal changes, and if no rewriting is required, then return the same final user query.\n"
    "It is essential that you directly focus on the user's final query.\n"
    "If query asked for any date related things, just convert into YYYY-MM-DD format.\n"
    "If nothing mentions about whether the user wants today's, yesterday's, or month's, then just attach today's context.\n"
    "If the keyword is mentioned then there is already a context so make it directly simplified.\n"
    "IMPORTANT: ONLY GIVE CONTEXTUALIZATION QUERY. NOTHING ELSE. REMOVE ALL UNNECESSARY THINGS FROM RESPONSE.\n"
    f'Note that the current date and time is: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}\n'
    "IMPORTANT : Ensure the date is in the format yyyy-mm-dd"
)
system_msg = {"role": "system", "content": prompt}
user_msg = {"role": "user", "content": str(input())}
response = client.chat(
    model="llama3.1",
    messages=[system_msg, user_msg],
)
context_query = response['message']['content']
print(context_query)


# Step 2 — tool calling: let the model decide whether to invoke get_data
# (and with which start/end dates) for the contextualized query.
prompt = (
    "You are a Tool Calling engine. Assume queries are related to Restaurant Order Analytics.\n"
    f'Note that the current date and time is: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}\n'
    "Take start date and end date according how the question asked if week than take week day same goes for month and so on take it in your understanding how i gave it to you"
    "IMPORTANT : Example for today take start date of a day before and end date of today , same for if yesterday than the day before yesterday as start date and yesterday as end and so on"
    "If asked for a perticular date than need to take start date as that date and end date as the next date so it will give the the data of that perticular date."
    "IMPORTANT: If the range is given then start date will be the first date and the end date will be the date after the last date.must need to take the end date plus 1 to get the data."
    "STRICTLY:Always take end date after the day for which we are finding the data, means if the date is 2024-07-23 than end date must be plus one to the original date to find the data of that perticular date like end_date:2024-07-24 , and the start date is the date same date for which we are finding the data. "
    "IMPORTANT:must take the end date Plus one to the given date no matter what"
    "IMPORTANT : make sure the date is in the yyyy-mm-dd format"
)
messages = [{'role': 'system', 'content': prompt}]
messages.append({'role': 'user', 'content': context_query})
# Single tool exposed to the model: get_data(query, start_date, end_date).
response = client.chat(
    model="llama3.1",
    messages=messages,
    tools=[
        {
            'type': 'function',
            'function': {
                'name': 'get_data',
                'description': "Get Today's Restaurant Order Information with detailed natural language query",
                'parameters': {
                    'type': 'object',
                    'properties': {
                        'query': {
                            'type': 'string',
                            'description': 'natural language query string to be proceed',
                        },
                        "start_date": {
                            "type": "string",
                            "description": "Start date in YYYY-MM-DD format",
                        },
                        "end_date": {
                            "type": "string",
                            "description": "End date in YYYY-MM-DD format",
                        },
                    },
                    'required': ['query'],
                },
            },
        },
    ],
)
messages.append(response['message'])
# Execute any tool calls the model requested and append the results as
# 'tool' messages for the summarization pass below.
if response['message'].get('tool_calls'):
    available_functions = {
        'get_data': get_data
    }
    for tool in response['message']['tool_calls']:
        function_to_call = available_functions[tool['function']['name']]
        function_args = tool['function']['arguments']
        function_response = function_to_call(**function_args)
        print(f"Func Res : {function_to_call} {function_response}")
        messages.append(
            {
                'role': 'tool',
                'content': function_response,
            }
        )
else:
    print(response['message']['content'])

print(messages)

# Step 3 — summarization: merge all tool outputs into one simplified answer.
tool_responses = []

for message in messages:
    if message['role'] == 'tool':
        tool_responses.append(message['content'])

# NOTE(review): misspelled identifier kept as-is (documentation-only pass).
Combnined_Responce = ' , '.join(tool_responses)

prompt = (
    "Merge the sentence in simplified way , dont over cook anything just a basic merger"
    "always show the numbers if provided."
    "Never Compare."
    "Just Give the relevent information , remove everything which is junk like Based on the JSON schema... and so on"
    "if the answer is too long without junk than provide the full answer in simplified way , and provide in the list also"
)
system_msg = {"role": "system", "content": prompt}
user_msg = {"role": "user", "content": Combnined_Responce}
response = client.chat(
    model="llama3.1",
    messages=[system_msg, user_msg],
)
# NOTE(review): reuses the name ``context_query`` for the final summary.
context_query = response['message']['content']
print(context_query)
notebooks/sahl_daywise_analysis_order_count.ipynb ADDED
@@ -0,0 +1,975 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "nbformat": 4,
3
+ "nbformat_minor": 0,
4
+ "metadata": {
5
+ "colab": {
6
+ "provenance": []
7
+ },
8
+ "kernelspec": {
9
+ "name": "python3",
10
+ "display_name": "Python 3"
11
+ },
12
+ "language_info": {
13
+ "name": "python"
14
+ }
15
+ },
16
+ "cells": [
17
+ {
18
+ "cell_type": "code",
19
+ "source": [],
20
+ "metadata": {
21
+ "id": "mt1Mw60b6z8A"
22
+ },
23
+ "execution_count": null,
24
+ "outputs": []
25
+ },
26
+ {
27
+ "cell_type": "code",
28
+ "source": [
29
+ "!pip install pymongo"
30
+ ],
31
+ "metadata": {
32
+ "colab": {
33
+ "base_uri": "https://localhost:8080/"
34
+ },
35
+ "id": "9kRBPHRedNY-",
36
+ "outputId": "1042a2c1-0bba-4ae8-8dad-f9e074a338d3"
37
+ },
38
+ "execution_count": null,
39
+ "outputs": [
40
+ {
41
+ "output_type": "stream",
42
+ "name": "stdout",
43
+ "text": [
44
+ "Requirement already satisfied: pymongo in /usr/local/lib/python3.10/dist-packages (4.8.0)\n",
45
+ "Requirement already satisfied: dnspython<3.0.0,>=1.16.0 in /usr/local/lib/python3.10/dist-packages (from pymongo) (2.6.1)\n"
46
+ ]
47
+ }
48
+ ]
49
+ },
50
+ {
51
+ "cell_type": "code",
52
+ "execution_count": null,
53
+ "metadata": {
54
+ "id": "Ssn3vPsUCBhX"
55
+ },
56
+ "outputs": [],
57
+ "source": [
58
+ "from pymongo import MongoClient\n",
59
+ "client = MongoClient(\"mongodb+srv://chhatrapaligenerate:[email protected]\")"
60
+ ]
61
+ },
62
+ {
63
+ "cell_type": "code",
64
+ "source": [
65
+ "db = client['sahl']\n",
66
+ "orders_collection = db['orders']\n",
67
+ "stores_collection = db['stores']\n",
68
+ "brands_collection=db['brands']"
69
+ ],
70
+ "metadata": {
71
+ "id": "UTVJLdQBc46S"
72
+ },
73
+ "execution_count": null,
74
+ "outputs": []
75
+ },
76
+ {
77
+ "cell_type": "code",
78
+ "source": [
79
+ "orders_projection = {\n",
80
+ " 'data.order.store':1,\n",
81
+ " 'data.order.next_state':1,\n",
82
+ " 'data.order.next_states':1,\n",
83
+ " 'data.order.details.order_type':1,\n",
84
+ " 'data.order.details.id':1,\n",
85
+ " 'data.order.details.ext_platforms':1,\n",
86
+ " 'data.order.details.discount':1,\n",
87
+ " 'data.order.details.item_level_total_charges':1,\n",
88
+ " 'data.order.details.item_level_total_taxes':1,\n",
89
+ " 'data.order.details.item_taxes':1,\n",
90
+ " 'data.order.details.order_level_total_charges':1,\n",
91
+ " 'data.order.details.total_without_taxes':1,\n",
92
+ " \"data.order.details.order_state\":1,\n",
93
+ " \"data.order.details.order_subtotal\":1,\n",
94
+ " \"data.order.details.order_total\":1,\n",
95
+ " \"data.order.details.state\":1,\n",
96
+ " \"data.order.details.taxes\":1,\n",
97
+ " \"data.order.details.charges\":1,\n",
98
+ " \"data.order.details.total_charges\":1,\n",
99
+ " \"data.order.details.total_external_discount\":1,\n",
100
+ " \"data.order.details.total_taxes\":1,\n",
101
+ " \"data.order.details.total_tax_percentage\":1,\n",
102
+ " \"data.order.details.channel\":1,\n",
103
+ " \"data.order.details.created\":1,\n",
104
+ " \"data.order.details.delivery_datetime\":1,\n",
105
+ " \"data.order.details.orderType_label\":1,\n",
106
+ " \"data.order.details.qty\":1,\n",
107
+ " \"data.order.details.invoiceNo\":1,\n",
108
+ " \"data.order.details.total_charges\":1,\n",
109
+ " \"data.order.payment\":1,\n",
110
+ " \"data.order.details.created\":1,\n",
111
+ " \"store_id\":1,\n",
112
+ " \"orderCreatedAt\":1,\n",
113
+ " \"isOfflineOrder\":1,\n",
114
+ " \"createdAt\":1,\n",
115
+ " \"updatedAt\":1,\n",
116
+ "\n",
117
+ "}"
118
+ ],
119
+ "metadata": {
120
+ "id": "wMe0jml8c6FE"
121
+ },
122
+ "execution_count": null,
123
+ "outputs": []
124
+ },
125
+ {
126
+ "cell_type": "code",
127
+ "source": [
128
+ "stores_projection={\n",
129
+ " \"zip_codes\":1,\n",
130
+ " \"geo_longitude\":1,\n",
131
+ " 'geo_latitude':1,\n",
132
+ " 'active':1,\n",
133
+ " 'isAutoAccept':1,\n",
134
+ " 'auto_accept_in_second':1,\n",
135
+ " 'ordering_enabled':1,\n",
136
+ " 'included_platforms':1,\n",
137
+ " 'excluded_platforms':1,\n",
138
+ " 'isDeleted':1,\n",
139
+ " 'isActive':1,\n",
140
+ " 'islive':1,\n",
141
+ " 'name':1,\n",
142
+ " 'city':1,\n",
143
+ " 'ref_id':1,\n",
144
+ " 'min_delivery_time':1,\n",
145
+ " 'min_order_value':1,\n",
146
+ " 'platform_data':1,\n",
147
+ " 'status':1,\n",
148
+ " 'brandId':1,\n",
149
+ " 'userId':1,\n",
150
+ " 'region':1,\n",
151
+ " 'createdAt':1,\n",
152
+ " 'updatedAt':1,\n",
153
+ " 'platform_actions':1,\n",
154
+ " 'email':1,\n",
155
+ " 'customQuantityEnabled':1,\n",
156
+ " 'itemPriceIncludesTax':1,\n",
157
+ " 'isSahlOrder':1,\n",
158
+ "}"
159
+ ],
160
+ "metadata": {
161
+ "id": "IVmLyP4XgJS5"
162
+ },
163
+ "execution_count": null,
164
+ "outputs": []
165
+ },
166
+ {
167
+ "cell_type": "code",
168
+ "source": [
169
+ "brands_projection ={\n",
170
+ " '_id':1,\n",
171
+ " 'isApproved':1,\n",
172
+ " 'name':1,\n",
173
+ " 'key':1,\n",
174
+ " 'createdAt':1,\n",
175
+ " 'updatedAt':1,\n",
176
+ " 'region':1,\n",
177
+ " 'countryCode':1,\n",
178
+ " 'currency':1\n",
179
+ "}"
180
+ ],
181
+ "metadata": {
182
+ "id": "I5MTX0w2gMEP"
183
+ },
184
+ "execution_count": null,
185
+ "outputs": []
186
+ },
187
+ {
188
+ "cell_type": "code",
189
+ "source": [
190
+ "orders_data = list(orders_collection.find({},orders_projection))"
191
+ ],
192
+ "metadata": {
193
+ "id": "BajNx3y8gNgp"
194
+ },
195
+ "execution_count": null,
196
+ "outputs": []
197
+ },
198
+ {
199
+ "cell_type": "code",
200
+ "source": [
201
+ "stores_data = list(stores_collection.find({},stores_projection))"
202
+ ],
203
+ "metadata": {
204
+ "id": "oNxAB6pRgQJ2"
205
+ },
206
+ "execution_count": null,
207
+ "outputs": []
208
+ },
209
+ {
210
+ "cell_type": "code",
211
+ "source": [
212
+ "store_dict = {str(store['_id']): store.get('name',\"not_found\") for store in stores_data}"
213
+ ],
214
+ "metadata": {
215
+ "id": "oU2-HkFGmZMw"
216
+ },
217
+ "execution_count": null,
218
+ "outputs": []
219
+ },
220
+ {
221
+ "cell_type": "code",
222
+ "source": [
223
+ "from bson.objectid import ObjectId\n",
224
+ "import math"
225
+ ],
226
+ "metadata": {
227
+ "id": "-DdVDs98nv6P"
228
+ },
229
+ "execution_count": null,
230
+ "outputs": []
231
+ },
232
+ {
233
+ "cell_type": "code",
234
+ "source": [
235
+ "from datetime import datetime"
236
+ ],
237
+ "metadata": {
238
+ "id": "lAY9cWZvwJXC"
239
+ },
240
+ "execution_count": null,
241
+ "outputs": []
242
+ },
243
+ {
244
+ "cell_type": "code",
245
+ "source": [
246
+ "brands_data = list(brands_collection.find({},brands_projection))"
247
+ ],
248
+ "metadata": {
249
+ "id": "14QKsF7UgQaA"
250
+ },
251
+ "execution_count": null,
252
+ "outputs": []
253
+ },
254
+ {
255
+ "cell_type": "code",
256
+ "source": [
257
+ "orders_details = []\n",
258
+ "for order in orders_data:\n",
259
+ " try:\n",
260
+ " order_created_at = order.get('orderCreatedAt')\n",
261
+ " timestamp_s = order_created_at / 1000\n",
262
+ " dt = datetime.fromtimestamp(timestamp_s)\n",
263
+ "\n",
264
+ " date_only = dt.date()\n",
265
+ " except:\n",
266
+ " continue\n",
267
+ " order_detail = {\n",
268
+ " 'date': date_only,\n",
269
+ " 'store_id': str(order['store_id']),\n",
270
+ " 'order_subtotal': order['data'][0]['order']['details']['order_total']\n",
271
+ " }\n",
272
+ " orders_details.append(order_detail)"
273
+ ],
274
+ "metadata": {
275
+ "id": "tsNf1YdjwHIV"
276
+ },
277
+ "execution_count": null,
278
+ "outputs": []
279
+ },
280
+ {
281
+ "cell_type": "code",
282
+ "source": [
283
+ "from collections import defaultdict"
284
+ ],
285
+ "metadata": {
286
+ "id": "J4OofItQwmzm"
287
+ },
288
+ "execution_count": null,
289
+ "outputs": []
290
+ },
291
+ {
292
+ "cell_type": "code",
293
+ "source": [
294
+ "store_order_counts = defaultdict(lambda: defaultdict(int))\n",
295
+ "\n",
296
+ "# Populate the result dictionary\n",
297
+ "for order in orders_details:\n",
298
+ " store_id = order['store_id']\n",
299
+ " store_name = store_dict.get(store_id)\n",
300
+ " if store_name:\n",
301
+ " date_str = order['date'].strftime('%Y-%m-%d') # Format date as string\n",
302
+ "\n",
303
+ " # Increment the count for the given store and date\n",
304
+ " store_order_counts[store_name][date_str] += 1\n",
305
+ "\n",
306
+ "# Convert defaultdict to dict for readability\n",
307
+ "store_order_counts = {store: dict(dates) for store, dates in store_order_counts.items()}\n"
308
+ ],
309
+ "metadata": {
310
+ "id": "QfQl1zovwKzo"
311
+ },
312
+ "execution_count": null,
313
+ "outputs": []
314
+ },
315
+ {
316
+ "cell_type": "code",
317
+ "source": [
318
+ "store_order_counts"
319
+ ],
320
+ "metadata": {
321
+ "colab": {
322
+ "base_uri": "https://localhost:8080/"
323
+ },
324
+ "id": "dCR3LYnxwQxb",
325
+ "outputId": "5cf749ca-7e50-4262-ee84-91b2013cca0a"
326
+ },
327
+ "execution_count": null,
328
+ "outputs": [
329
+ {
330
+ "output_type": "execute_result",
331
+ "data": {
332
+ "text/plain": [
333
+ "{'WeDesi': {'2024-01-05': 2, '2024-02-06': 3},\n",
334
+ " '8 miles': {'2024-01-06': 3,\n",
335
+ " '2024-01-08': 24,\n",
336
+ " '2024-01-09': 13,\n",
337
+ " '2024-01-10': 1,\n",
338
+ " '2024-01-11': 9,\n",
339
+ " '2024-01-25': 1,\n",
340
+ " '2024-02-06': 5,\n",
341
+ " '2024-02-21': 1,\n",
342
+ " '2024-03-12': 3,\n",
343
+ " '2024-05-31': 16,\n",
344
+ " '2024-06-05': 8},\n",
345
+ " 'lebanese-house-dubai': {'2024-01-08': 20,\n",
346
+ " '2024-01-09': 4,\n",
347
+ " '2024-01-10': 1,\n",
348
+ " '2024-01-11': 55,\n",
349
+ " '2024-01-12': 5,\n",
350
+ " '2024-01-17': 1,\n",
351
+ " '2024-01-23': 6,\n",
352
+ " '2024-01-24': 1,\n",
353
+ " '2024-02-01': 1,\n",
354
+ " '2024-02-05': 10,\n",
355
+ " '2024-02-06': 2},\n",
356
+ " 'Cedar Restaurant & Grill': {'2024-01-08': 8, '2024-02-06': 1},\n",
357
+ " 'LEMAR Resturant & Cafe': {'2024-01-08': 5,\n",
358
+ " '2024-01-09': 33,\n",
359
+ " '2024-01-10': 59,\n",
360
+ " '2024-01-11': 17,\n",
361
+ " '2024-01-12': 25,\n",
362
+ " '2024-01-16': 57,\n",
363
+ " '2024-01-17': 44,\n",
364
+ " '2024-01-18': 1,\n",
365
+ " '2024-01-19': 4,\n",
366
+ " '2024-01-20': 2,\n",
367
+ " '2024-01-21': 5,\n",
368
+ " '2024-01-22': 1,\n",
369
+ " '2024-01-23': 1,\n",
370
+ " '2024-01-24': 3,\n",
371
+ " '2024-01-25': 3,\n",
372
+ " '2024-01-26': 3,\n",
373
+ " '2024-01-29': 2,\n",
374
+ " '2024-01-30': 2,\n",
375
+ " '2024-01-31': 2,\n",
376
+ " '2024-02-01': 2,\n",
377
+ " '2024-02-05': 2,\n",
378
+ " '2024-03-15': 1,\n",
379
+ " '2024-03-22': 6,\n",
380
+ " '2024-05-10': 10},\n",
381
+ " 'test foir ai': {'2024-01-08': 1,\n",
382
+ " '2024-01-18': 1,\n",
383
+ " '2024-01-20': 1,\n",
384
+ " '2024-01-31': 30,\n",
385
+ " '2024-02-01': 100,\n",
386
+ " '2024-02-08': 8,\n",
387
+ " '2024-07-10': 1},\n",
388
+ " 'Apna Punjab Paratha': {'2024-01-09': 3,\n",
389
+ " '2024-01-10': 1,\n",
390
+ " '2024-01-11': 35,\n",
391
+ " '2024-01-12': 7,\n",
392
+ " '2024-01-16': 57,\n",
393
+ " '2024-01-17': 12,\n",
394
+ " '2024-01-18': 39,\n",
395
+ " '2024-01-19': 15,\n",
396
+ " '2024-01-20': 14,\n",
397
+ " '2024-01-22': 15,\n",
398
+ " '2024-01-23': 2,\n",
399
+ " '2024-01-24': 22,\n",
400
+ " '2024-01-25': 9,\n",
401
+ " '2024-01-27': 1,\n",
402
+ " '2024-01-29': 6,\n",
403
+ " '2024-01-30': 1,\n",
404
+ " '2024-01-31': 7,\n",
405
+ " '2024-02-01': 5,\n",
406
+ " '2024-02-02': 3,\n",
407
+ " '2024-02-03': 21,\n",
408
+ " '2024-02-05': 16,\n",
409
+ " '2024-02-06': 35,\n",
410
+ " '2024-02-07': 10,\n",
411
+ " '2024-02-08': 3,\n",
412
+ " '2024-02-09': 11,\n",
413
+ " '2024-02-10': 5,\n",
414
+ " '2024-02-12': 1,\n",
415
+ " '2024-02-13': 6,\n",
416
+ " '2024-02-14': 18,\n",
417
+ " '2024-02-15': 13,\n",
418
+ " '2024-02-16': 11,\n",
419
+ " '2024-02-19': 1,\n",
420
+ " '2024-02-21': 8,\n",
421
+ " '2024-02-22': 15,\n",
422
+ " '2024-02-23': 4,\n",
423
+ " '2024-02-26': 6,\n",
424
+ " '2024-02-27': 6,\n",
425
+ " '2024-02-28': 4,\n",
426
+ " '2024-02-29': 2,\n",
427
+ " '2024-03-01': 10,\n",
428
+ " '2024-03-02': 1,\n",
429
+ " '2024-03-11': 3,\n",
430
+ " '2024-03-12': 4,\n",
431
+ " '2024-03-14': 1,\n",
432
+ " '2024-03-15': 12,\n",
433
+ " '2024-03-19': 17,\n",
434
+ " '2024-03-20': 19,\n",
435
+ " '2024-03-21': 5,\n",
436
+ " '2024-03-24': 1,\n",
437
+ " '2024-03-25': 1,\n",
438
+ " '2024-03-28': 6,\n",
439
+ " '2024-03-29': 4,\n",
440
+ " '2024-04-01': 43,\n",
441
+ " '2024-04-02': 57,\n",
442
+ " '2024-04-03': 39,\n",
443
+ " '2024-04-04': 34,\n",
444
+ " '2024-04-05': 7,\n",
445
+ " '2024-04-08': 2,\n",
446
+ " '2024-04-09': 8,\n",
447
+ " '2024-04-10': 7,\n",
448
+ " '2024-04-11': 4,\n",
449
+ " '2024-04-12': 2,\n",
450
+ " '2024-04-16': 3,\n",
451
+ " '2024-05-03': 6,\n",
452
+ " '2024-05-06': 12,\n",
453
+ " '2024-05-10': 11,\n",
454
+ " '2024-05-13': 24,\n",
455
+ " '2024-05-14': 2,\n",
456
+ " '2024-05-15': 1,\n",
457
+ " '2024-05-17': 17,\n",
458
+ " '2024-05-18': 11,\n",
459
+ " '2024-05-20': 14,\n",
460
+ " '2024-05-24': 25,\n",
461
+ " '2024-05-27': 1,\n",
462
+ " '2024-05-28': 1,\n",
463
+ " '2024-06-03': 1,\n",
464
+ " '2024-06-05': 11,\n",
465
+ " '2024-06-06': 4,\n",
466
+ " '2024-06-07': 2,\n",
467
+ " '2024-06-20': 1,\n",
468
+ " '2024-07-31': 4},\n",
469
+ " 'Verjus Bites': {'2024-01-09': 2, '2024-01-11': 1, '2024-01-12': 7},\n",
470
+ " 'Super Foods': {'2024-01-10': 10,\n",
471
+ " '2024-01-11': 1,\n",
472
+ " '2024-01-16': 9,\n",
473
+ " '2024-01-18': 4},\n",
474
+ " 'Baba Grill Restauarnt': {'2024-01-27': 1},\n",
475
+ " 'Makaran': {'2024-02-05': 13,\n",
476
+ " '2024-02-06': 24,\n",
477
+ " '2024-02-07': 47,\n",
478
+ " '2024-02-08': 70,\n",
479
+ " '2024-02-09': 24,\n",
480
+ " '2024-02-10': 7,\n",
481
+ " '2024-02-12': 48,\n",
482
+ " '2024-02-13': 34,\n",
483
+ " '2024-02-14': 22,\n",
484
+ " '2024-02-15': 11,\n",
485
+ " '2024-02-16': 1,\n",
486
+ " '2024-02-19': 1,\n",
487
+ " '2024-02-20': 8,\n",
488
+ " '2024-02-21': 1,\n",
489
+ " '2024-02-22': 10,\n",
490
+ " '2024-02-23': 23,\n",
491
+ " '2024-02-26': 1,\n",
492
+ " '2024-02-27': 11,\n",
493
+ " '2024-02-28': 1,\n",
494
+ " '2024-02-29': 10,\n",
495
+ " '2024-03-01': 54,\n",
496
+ " '2024-03-02': 45,\n",
497
+ " '2024-03-05': 35,\n",
498
+ " '2024-03-06': 3,\n",
499
+ " '2024-03-08': 5,\n",
500
+ " '2024-03-09': 10,\n",
501
+ " '2024-03-11': 11,\n",
502
+ " '2024-03-12': 14,\n",
503
+ " '2024-03-14': 28,\n",
504
+ " '2024-03-15': 14,\n",
505
+ " '2024-03-20': 28,\n",
506
+ " '2024-03-21': 7,\n",
507
+ " '2024-03-26': 2,\n",
508
+ " '2024-03-27': 4,\n",
509
+ " '2024-03-28': 5,\n",
510
+ " '2024-04-04': 29,\n",
511
+ " '2024-04-05': 5,\n",
512
+ " '2024-04-06': 1,\n",
513
+ " '2024-04-09': 14,\n",
514
+ " '2024-04-10': 4,\n",
515
+ " '2024-04-11': 10,\n",
516
+ " '2024-04-12': 4,\n",
517
+ " '2024-04-15': 13,\n",
518
+ " '2024-04-17': 3,\n",
519
+ " '2024-04-18': 3,\n",
520
+ " '2024-04-23': 6,\n",
521
+ " '2024-04-24': 7,\n",
522
+ " '2024-04-26': 7,\n",
523
+ " '2024-04-29': 6,\n",
524
+ " '2024-04-30': 23,\n",
525
+ " '2024-05-01': 6,\n",
526
+ " '2024-05-02': 1,\n",
527
+ " '2024-05-09': 2,\n",
528
+ " '2024-05-10': 5,\n",
529
+ " '2024-05-13': 1,\n",
530
+ " '2024-05-14': 2,\n",
531
+ " '2024-05-27': 9,\n",
532
+ " '2024-05-28': 13,\n",
533
+ " '2024-05-29': 6,\n",
534
+ " '2024-05-30': 10,\n",
535
+ " '2024-06-03': 16,\n",
536
+ " '2024-06-04': 14,\n",
537
+ " '2024-06-05': 3,\n",
538
+ " '2024-06-10': 1,\n",
539
+ " '2024-06-11': 2,\n",
540
+ " '2024-06-14': 1,\n",
541
+ " '2024-06-18': 1,\n",
542
+ " '2024-07-11': 1},\n",
543
+ " 'Kutumbari Resturant': {'2024-02-05': 10,\n",
544
+ " '2024-02-06': 26,\n",
545
+ " '2024-02-07': 1,\n",
546
+ " '2024-02-09': 9,\n",
547
+ " '2024-02-10': 3,\n",
548
+ " '2024-02-11': 2,\n",
549
+ " '2024-02-12': 10,\n",
550
+ " '2024-02-13': 24,\n",
551
+ " '2024-02-14': 10,\n",
552
+ " '2024-02-15': 12,\n",
553
+ " '2024-02-17': 1,\n",
554
+ " '2024-02-19': 1,\n",
555
+ " '2024-02-20': 9,\n",
556
+ " '2024-02-21': 19,\n",
557
+ " '2024-02-22': 7,\n",
558
+ " '2024-02-23': 24,\n",
559
+ " '2024-02-26': 60,\n",
560
+ " '2024-02-27': 30,\n",
561
+ " '2024-02-28': 50,\n",
562
+ " '2024-02-29': 24,\n",
563
+ " '2024-03-01': 27,\n",
564
+ " '2024-03-02': 10,\n",
565
+ " '2024-03-03': 2,\n",
566
+ " '2024-03-04': 25,\n",
567
+ " '2024-03-05': 26,\n",
568
+ " '2024-03-06': 4,\n",
569
+ " '2024-03-08': 39,\n",
570
+ " '2024-03-11': 23,\n",
571
+ " '2024-03-12': 15,\n",
572
+ " '2024-03-21': 13,\n",
573
+ " '2024-03-22': 3,\n",
574
+ " '2024-03-26': 7,\n",
575
+ " '2024-03-27': 35,\n",
576
+ " '2024-03-28': 35,\n",
577
+ " '2024-03-29': 6,\n",
578
+ " '2024-04-03': 1,\n",
579
+ " '2024-04-11': 9,\n",
580
+ " '2024-04-12': 3,\n",
581
+ " '2024-04-15': 1,\n",
582
+ " '2024-04-16': 3,\n",
583
+ " '2024-04-22': 1,\n",
584
+ " '2024-04-23': 1,\n",
585
+ " '2024-04-24': 2,\n",
586
+ " '2024-04-25': 1,\n",
587
+ " '2024-04-26': 39,\n",
588
+ " '2024-04-29': 3,\n",
589
+ " '2024-04-30': 9,\n",
590
+ " '2024-05-30': 10,\n",
591
+ " '2024-05-31': 22,\n",
592
+ " '2024-06-03': 24,\n",
593
+ " '2024-06-04': 19,\n",
594
+ " '2024-06-05': 7,\n",
595
+ " '2024-06-06': 1,\n",
596
+ " '2024-06-07': 15,\n",
597
+ " '2024-06-10': 12,\n",
598
+ " '2024-06-11': 39,\n",
599
+ " '2024-06-12': 33,\n",
600
+ " '2024-06-13': 16,\n",
601
+ " '2024-06-14': 70,\n",
602
+ " '2024-06-15': 59,\n",
603
+ " '2024-06-17': 6,\n",
604
+ " '2024-06-18': 5,\n",
605
+ " '2024-06-19': 3,\n",
606
+ " '2024-06-21': 4,\n",
607
+ " '2024-06-24': 11,\n",
608
+ " '2024-06-27': 1,\n",
609
+ " '2024-07-01': 1,\n",
610
+ " '2024-07-02': 1,\n",
611
+ " '2024-07-03': 11,\n",
612
+ " '2024-07-04': 15,\n",
613
+ " '2024-07-05': 4,\n",
614
+ " '2024-07-08': 9,\n",
615
+ " '2024-07-09': 6,\n",
616
+ " '2024-07-11': 19,\n",
617
+ " '2024-07-12': 2,\n",
618
+ " '2024-07-22': 2,\n",
619
+ " '2024-07-23': 8,\n",
620
+ " '2024-07-26': 7,\n",
621
+ " '2024-07-29': 4},\n",
622
+ " 'The Barnyard Co': {'2024-02-07': 2,\n",
623
+ " '2024-02-08': 6,\n",
624
+ " '2024-02-19': 3,\n",
625
+ " '2024-02-20': 8},\n",
626
+ " 'We Desi 2': {'2024-02-09': 27, '2024-02-10': 4, '2024-02-12': 1},\n",
627
+ " 'Golden Chicken': {'2024-02-13': 1,\n",
628
+ " '2024-02-27': 10,\n",
629
+ " '2024-02-28': 21,\n",
630
+ " '2024-03-01': 29,\n",
631
+ " '2024-03-02': 1},\n",
632
+ " 'Sushiz Chinese': {'2024-02-15': 10,\n",
633
+ " '2024-02-16': 4,\n",
634
+ " '2024-02-26': 5,\n",
635
+ " '2024-06-05': 2},\n",
636
+ " 'Baba Grill Restaurant': {'2024-02-15': 4, '2024-07-03': 1},\n",
637
+ " 'Monasabat jibla': {'2024-02-19': 3},\n",
638
+ " 'Betawi-JLT': {'2024-02-19': 39,\n",
639
+ " '2024-02-20': 56,\n",
640
+ " '2024-02-21': 31,\n",
641
+ " '2024-02-22': 48,\n",
642
+ " '2024-02-23': 37,\n",
643
+ " '2024-02-24': 104,\n",
644
+ " '2024-02-25': 15,\n",
645
+ " '2024-02-26': 72,\n",
646
+ " '2024-02-27': 28,\n",
647
+ " '2024-02-28': 35,\n",
648
+ " '2024-02-29': 127,\n",
649
+ " '2024-03-01': 105,\n",
650
+ " '2024-03-02': 44,\n",
651
+ " '2024-03-04': 62,\n",
652
+ " '2024-03-06': 112,\n",
653
+ " '2024-03-08': 201,\n",
654
+ " '2024-03-09': 17,\n",
655
+ " '2024-03-11': 87,\n",
656
+ " '2024-03-12': 65,\n",
657
+ " '2024-03-13': 91,\n",
658
+ " '2024-03-14': 115,\n",
659
+ " '2024-03-15': 104,\n",
660
+ " '2024-03-16': 29,\n",
661
+ " '2024-03-17': 22,\n",
662
+ " '2024-03-18': 58,\n",
663
+ " '2024-03-19': 38,\n",
664
+ " '2024-03-20': 134,\n",
665
+ " '2024-03-21': 89,\n",
666
+ " '2024-03-22': 73,\n",
667
+ " '2024-03-23': 10,\n",
668
+ " '2024-03-26': 40,\n",
669
+ " '2024-03-27': 389,\n",
670
+ " '2024-03-28': 60,\n",
671
+ " '2024-03-29': 63,\n",
672
+ " '2024-03-30': 4,\n",
673
+ " '2024-04-01': 32,\n",
674
+ " '2024-04-02': 61,\n",
675
+ " '2024-04-03': 52,\n",
676
+ " '2024-04-04': 39,\n",
677
+ " '2024-04-05': 5,\n",
678
+ " '2024-04-06': 2,\n",
679
+ " '2024-04-07': 2,\n",
680
+ " '2024-04-08': 49,\n",
681
+ " '2024-04-09': 60,\n",
682
+ " '2024-04-10': 6,\n",
683
+ " '2024-04-11': 42,\n",
684
+ " '2024-04-12': 26,\n",
685
+ " '2024-04-15': 4,\n",
686
+ " '2024-04-16': 33,\n",
687
+ " '2024-04-17': 95,\n",
688
+ " '2024-04-18': 73,\n",
689
+ " '2024-04-19': 117,\n",
690
+ " '2024-04-21': 7,\n",
691
+ " '2024-04-22': 16,\n",
692
+ " '2024-04-23': 80,\n",
693
+ " '2024-04-24': 41,\n",
694
+ " '2024-04-25': 22,\n",
695
+ " '2024-04-26': 15,\n",
696
+ " '2024-04-27': 18,\n",
697
+ " '2024-04-28': 1,\n",
698
+ " '2024-04-29': 33,\n",
699
+ " '2024-04-30': 48,\n",
700
+ " '2024-05-01': 108,\n",
701
+ " '2024-05-02': 136,\n",
702
+ " '2024-05-03': 170,\n",
703
+ " '2024-05-04': 144,\n",
704
+ " '2024-05-06': 73,\n",
705
+ " '2024-05-07': 112,\n",
706
+ " '2024-05-08': 2,\n",
707
+ " '2024-05-09': 63,\n",
708
+ " '2024-05-10': 35,\n",
709
+ " '2024-05-13': 78,\n",
710
+ " '2024-05-14': 30,\n",
711
+ " '2024-05-15': 27,\n",
712
+ " '2024-05-16': 20,\n",
713
+ " '2024-05-17': 74,\n",
714
+ " '2024-05-20': 52,\n",
715
+ " '2024-05-21': 29,\n",
716
+ " '2024-05-22': 66,\n",
717
+ " '2024-05-23': 51,\n",
718
+ " '2024-05-24': 20,\n",
719
+ " '2024-05-25': 3,\n",
720
+ " '2024-05-26': 2,\n",
721
+ " '2024-06-04': 20,\n",
722
+ " '2024-06-06': 2,\n",
723
+ " '2024-06-07': 14,\n",
724
+ " '2024-06-08': 3,\n",
725
+ " '2024-06-11': 54,\n",
726
+ " '2024-06-12': 33,\n",
727
+ " '2024-06-13': 63,\n",
728
+ " '2024-06-14': 42,\n",
729
+ " '2024-06-17': 10,\n",
730
+ " '2024-06-18': 19,\n",
731
+ " '2024-06-19': 88,\n",
732
+ " '2024-06-20': 73,\n",
733
+ " '2024-06-21': 20,\n",
734
+ " '2024-06-24': 51,\n",
735
+ " '2024-06-25': 11,\n",
736
+ " '2024-06-26': 4,\n",
737
+ " '2024-06-27': 10,\n",
738
+ " '2024-06-28': 3,\n",
739
+ " '2024-06-29': 3,\n",
740
+ " '2024-07-01': 6,\n",
741
+ " '2024-07-02': 36,\n",
742
+ " '2024-07-03': 12,\n",
743
+ " '2024-07-04': 7,\n",
744
+ " '2024-07-05': 32,\n",
745
+ " '2024-07-08': 25,\n",
746
+ " '2024-07-09': 15,\n",
747
+ " '2024-07-10': 22,\n",
748
+ " '2024-07-11': 13,\n",
749
+ " '2024-07-12': 97,\n",
750
+ " '2024-07-15': 48,\n",
751
+ " '2024-07-16': 37,\n",
752
+ " '2024-07-17': 16,\n",
753
+ " '2024-07-18': 33,\n",
754
+ " '2024-07-19': 18,\n",
755
+ " '2024-07-22': 76,\n",
756
+ " '2024-07-23': 16,\n",
757
+ " '2024-07-24': 16,\n",
758
+ " '2024-07-25': 50,\n",
759
+ " '2024-07-26': 7,\n",
760
+ " '2024-07-29': 39,\n",
761
+ " '2024-07-30': 105,\n",
762
+ " '2024-07-31': 141,\n",
763
+ " '2024-08-01': 21},\n",
764
+ " 'Hunnybun': {'2024-02-20': 1},\n",
765
+ " 'DHANVI ICE CREAM PARLOUR': {'2024-02-22': 1},\n",
766
+ " 'Toranj Restaurant': {'2024-02-29': 3,\n",
767
+ " '2024-03-15': 1,\n",
768
+ " '2024-04-17': 35,\n",
769
+ " '2024-06-05': 3,\n",
770
+ " '2024-06-06': 1},\n",
771
+ " 'SAHL Restaurant': {'2024-02-29': 6,\n",
772
+ " '2024-05-14': 24,\n",
773
+ " '2024-05-15': 4,\n",
774
+ " '2024-07-01': 1},\n",
775
+ " 'Gravies': {'2024-03-01': 1, '2024-03-05': 1},\n",
776
+ " 'Atlass one': {'2024-03-04': 5,\n",
777
+ " '2024-03-05': 1,\n",
778
+ " '2024-03-06': 3,\n",
779
+ " '2024-03-08': 12,\n",
780
+ " '2024-03-11': 3,\n",
781
+ " '2024-03-12': 33,\n",
782
+ " '2024-03-13': 25,\n",
783
+ " '2024-03-14': 10,\n",
784
+ " '2024-03-15': 17,\n",
785
+ " '2024-03-18': 10,\n",
786
+ " '2024-03-19': 9,\n",
787
+ " '2024-03-20': 12,\n",
788
+ " '2024-03-26': 13,\n",
789
+ " '2024-03-27': 1,\n",
790
+ " '2024-03-28': 37,\n",
791
+ " '2024-03-29': 15,\n",
792
+ " '2024-04-01': 15,\n",
793
+ " '2024-04-02': 28,\n",
794
+ " '2024-04-03': 3,\n",
795
+ " '2024-04-04': 26,\n",
796
+ " '2024-04-05': 22,\n",
797
+ " '2024-04-06': 10,\n",
798
+ " '2024-04-08': 9,\n",
799
+ " '2024-04-09': 11,\n",
800
+ " '2024-04-10': 19,\n",
801
+ " '2024-04-11': 33,\n",
802
+ " '2024-04-12': 12,\n",
803
+ " '2024-04-15': 18,\n",
804
+ " '2024-04-16': 5,\n",
805
+ " '2024-04-17': 14,\n",
806
+ " '2024-04-18': 11,\n",
807
+ " '2024-04-19': 15,\n",
808
+ " '2024-04-24': 22,\n",
809
+ " '2024-04-25': 14},\n",
810
+ " 'Wrap and Dip Cafe': {'2024-03-08': 3,\n",
811
+ " '2024-03-13': 5,\n",
812
+ " '2024-03-14': 4,\n",
813
+ " '2024-03-18': 2,\n",
814
+ " '2024-03-22': 2,\n",
815
+ " '2024-04-02': 6,\n",
816
+ " '2024-04-19': 7,\n",
817
+ " '2024-04-22': 14,\n",
818
+ " '2024-04-23': 2,\n",
819
+ " '2024-04-29': 6},\n",
820
+ " 'Desibites Restaurant': {'2024-03-13': 7},\n",
821
+ " 'Challet Grill Plus - Al Barsha 1': {'2024-03-13': 7,\n",
822
+ " '2024-03-22': 6,\n",
823
+ " '2024-04-17': 5,\n",
824
+ " '2024-05-14': 3},\n",
825
+ " 'Novokich Restaurant': {'2024-03-19': 1},\n",
826
+ " 'Mexican Express - Deira': {'2024-03-27': 2,\n",
827
+ " '2024-03-29': 4,\n",
828
+ " '2024-04-02': 3},\n",
829
+ " 'Jbroz-Union Road': {'2024-04-01': 2},\n",
830
+ " 'Chopes n Hopes': {'2024-04-04': 13, '2024-07-21': 1},\n",
831
+ " 'Bondhu Restaurant': {'2024-04-10': 8,\n",
832
+ " '2024-04-11': 47,\n",
833
+ " '2024-04-12': 63,\n",
834
+ " '2024-04-15': 11,\n",
835
+ " '2024-04-22': 1},\n",
836
+ " 'Paratha Central': {'2024-04-17': 200,\n",
837
+ " '2024-04-19': 8,\n",
838
+ " '2024-04-22': 19,\n",
839
+ " '2024-04-23': 17,\n",
840
+ " '2024-04-24': 22,\n",
841
+ " '2024-04-25': 5,\n",
842
+ " '2024-04-30': 1,\n",
843
+ " '2024-05-01': 2,\n",
844
+ " '2024-05-02': 9,\n",
845
+ " '2024-05-03': 3,\n",
846
+ " '2024-05-06': 4,\n",
847
+ " '2024-05-07': 2,\n",
848
+ " '2024-05-08': 8,\n",
849
+ " '2024-05-09': 4,\n",
850
+ " '2024-05-10': 4,\n",
851
+ " '2024-05-14': 1,\n",
852
+ " '2024-05-15': 1,\n",
853
+ " '2024-05-16': 7},\n",
854
+ " 'Just Vada Pav - Jumeirah Village Circle': {'2024-04-26': 1,\n",
855
+ " '2024-05-01': 2,\n",
856
+ " '2024-05-16': 7,\n",
857
+ " '2024-05-17': 29,\n",
858
+ " '2024-05-22': 29,\n",
859
+ " '2024-06-18': 1,\n",
860
+ " '2024-07-01': 1,\n",
861
+ " '2024-07-03': 29,\n",
862
+ " '2024-07-04': 19,\n",
863
+ " '2024-07-05': 28,\n",
864
+ " '2024-07-08': 21,\n",
865
+ " '2024-07-09': 5,\n",
866
+ " '2024-07-11': 1,\n",
867
+ " '2024-07-12': 12,\n",
868
+ " '2024-07-15': 3,\n",
869
+ " '2024-07-16': 6,\n",
870
+ " '2024-07-17': 5,\n",
871
+ " '2024-07-18': 2,\n",
872
+ " '2024-07-19': 14,\n",
873
+ " '2024-07-22': 11,\n",
874
+ " '2024-07-23': 13,\n",
875
+ " '2024-07-24': 9,\n",
876
+ " '2024-07-25': 3,\n",
877
+ " '2024-07-26': 3,\n",
878
+ " '2024-07-29': 23,\n",
879
+ " '2024-07-30': 13,\n",
880
+ " '2024-07-31': 16,\n",
881
+ " '2024-08-01': 1},\n",
882
+ " 'Deccan Delight': {'2024-05-01': 68,\n",
883
+ " '2024-05-21': 2,\n",
884
+ " '2024-05-27': 1,\n",
885
+ " '2024-05-28': 6},\n",
886
+ " 'Koshari & Pastry Afandina': {'2024-05-15': 2,\n",
887
+ " '2024-06-07': 5,\n",
888
+ " '2024-06-10': 2,\n",
889
+ " '2024-06-12': 11,\n",
890
+ " '2024-06-13': 3,\n",
891
+ " '2024-06-14': 17,\n",
892
+ " '2024-06-17': 2},\n",
893
+ " 'Just Vada Pav - Discovery Garden': {'2024-05-17': 16,\n",
894
+ " '2024-05-22': 31,\n",
895
+ " '2024-06-06': 1,\n",
896
+ " '2024-07-05': 6,\n",
897
+ " '2024-07-12': 1,\n",
898
+ " '2024-07-22': 1,\n",
899
+ " '2024-07-23': 1},\n",
900
+ " 'Just Vada Pav - Al Karama': {'2024-05-17': 7,\n",
901
+ " '2024-05-22': 30,\n",
902
+ " '2024-06-04': 5,\n",
903
+ " '2024-06-10': 1,\n",
904
+ " '2024-06-11': 70,\n",
905
+ " '2024-06-12': 8,\n",
906
+ " '2024-06-24': 3,\n",
907
+ " '2024-06-25': 1,\n",
908
+ " '2024-07-09': 2,\n",
909
+ " '2024-07-11': 1,\n",
910
+ " '2024-07-15': 12,\n",
911
+ " '2024-07-22': 2,\n",
912
+ " '2024-07-30': 1},\n",
913
+ " \"Joy's Pasta- Business Bay\": {'2024-06-04': 4,\n",
914
+ " '2024-06-05': 2,\n",
915
+ " '2024-06-06': 1,\n",
916
+ " '2024-06-10': 1,\n",
917
+ " '2024-06-11': 17,\n",
918
+ " '2024-06-13': 1,\n",
919
+ " '2024-06-14': 3,\n",
920
+ " '2024-06-17': 5,\n",
921
+ " '2024-06-18': 2,\n",
922
+ " '2024-06-25': 26,\n",
923
+ " '2024-06-26': 1,\n",
924
+ " '2024-07-08': 2,\n",
925
+ " '2024-07-09': 3},\n",
926
+ " 'J5 Restaurant': {'2024-06-04': 5},\n",
927
+ " 'Fruit time- Al Barsha': {'2024-06-05': 8,\n",
928
+ " '2024-06-06': 3,\n",
929
+ " '2024-06-11': 8,\n",
930
+ " '2024-06-12': 44,\n",
931
+ " '2024-06-13': 13,\n",
932
+ " '2024-06-14': 30,\n",
933
+ " '2024-06-15': 13,\n",
934
+ " '2024-06-18': 31,\n",
935
+ " '2024-06-19': 1,\n",
936
+ " '2024-06-20': 19,\n",
937
+ " '2024-06-21': 8,\n",
938
+ " '2024-06-24': 5,\n",
939
+ " '2024-06-25': 2,\n",
940
+ " '2024-06-28': 2,\n",
941
+ " '2024-07-23': 1},\n",
942
+ " 'Spinzer Express Restaurant -Al QUOZ 3': {'2024-06-05': 1,\n",
943
+ " '2024-06-06': 3,\n",
944
+ " '2024-06-17': 3,\n",
945
+ " '2024-06-27': 1,\n",
946
+ " '2024-06-28': 2,\n",
947
+ " '2024-07-25': 3,\n",
948
+ " '2024-07-29': 9},\n",
949
+ " 'joys - hessa street': {'2024-06-17': 1,\n",
950
+ " '2024-06-18': 8,\n",
951
+ " '2024-06-25': 37,\n",
952
+ " '2024-06-26': 6,\n",
953
+ " '2024-07-08': 8,\n",
954
+ " '2024-07-09': 3},\n",
955
+ " 'mt store': {'2024-06-20': 1, '2024-07-12': 1},\n",
956
+ " 'Just Curry Restaurant': {'2024-07-09': 3},\n",
957
+ " 'test 1': {'2024-07-11': 1, '2024-07-16': 2}}"
958
+ ]
959
+ },
960
+ "metadata": {},
961
+ "execution_count": 44
962
+ }
963
+ ]
964
+ },
965
+ {
966
+ "cell_type": "code",
967
+ "source": [],
968
+ "metadata": {
969
+ "id": "Cfncdn8OwoZx"
970
+ },
971
+ "execution_count": null,
972
+ "outputs": []
973
+ }
974
+ ]
975
+ }
notebooks/store_daywise_reveneu_main.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
src/__init__.py ADDED
File without changes
src/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (160 Bytes). View file
 
src/__pycache__/utils.cpython-312.pyc ADDED
Binary file (1.06 kB). View file
 
src/json_creation/__init__.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ from .json_schema import json_schema # noqa
2
+ from .app import final_counts #noqa
3
+ from .archive_orders import fetch_response #noqa
4
+ from .ollama_tool_calling import OllamaContextQuery
5
+ from .mongo_integration import ChatManager
src/json_creation/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (423 Bytes). View file
 
src/json_creation/__pycache__/aggregators.cpython-312.pyc ADDED
Binary file (1.51 kB). View file
 
src/json_creation/__pycache__/app.cpython-312.pyc ADDED
Binary file (3.83 kB). View file
 
src/json_creation/__pycache__/archive_orders.cpython-312.pyc ADDED
Binary file (3.8 kB). View file
 
src/json_creation/__pycache__/items_of_orders_timestamp.cpython-312.pyc ADDED
Binary file (1.4 kB). View file
 
src/json_creation/__pycache__/json_schema.cpython-312.pyc ADDED
Binary file (1.96 kB). View file
 
src/json_creation/__pycache__/mongo_integration.cpython-312.pyc ADDED
Binary file (4.18 kB). View file
 
src/json_creation/__pycache__/ollama_llm.cpython-312.pyc ADDED
Binary file (7.15 kB). View file
 
src/json_creation/__pycache__/ollama_tool_calling.cpython-312.pyc ADDED
Binary file (12.6 kB). View file
 
src/json_creation/__pycache__/order_states_timestamp.cpython-312.pyc ADDED
Binary file (2.57 kB). View file
 
src/json_creation/__pycache__/orders.cpython-312.pyc ADDED
Binary file (944 Bytes). View file
 
src/json_creation/__pycache__/revenue.cpython-312.pyc ADDED
Binary file (1.01 kB). View file
 
src/json_creation/aggregators.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import List, Dict
2
+
3
def get_platform_order_counts(data) -> List[Dict[str, int]]:
    """Extract per-platform order counts from an aggregate payload.

    Args:
        data: Aggregate response dict; reads the "getPlatFormOrderTotal"
            list, whose entries carry "_id" (platform name) and "count".

    Returns:
        A list of ``{"name": ..., "order_count": ...}`` dicts, one per
        platform entry, in payload order.
    """
    entries = data.get("getPlatFormOrderTotal", [])
    results = []
    for entry in entries:
        results.append({"name": entry["_id"], "order_count": entry["count"]})
    return results
10
+
11
+
12
def get_platform_revenue(data) -> List[Dict[str, float]]:
    """Extract per-platform revenue figures from an aggregate payload.

    Args:
        data: Aggregate response dict; reads the "getPlatFormOrderTotal"
            list, whose entries carry "_id" (platform name) and "sales".

    Returns:
        A list of ``{"name": ..., "revenue": ...}`` dicts, one per
        platform entry, in payload order.
    """
    entries = data.get("getPlatFormOrderTotal", [])
    results = []
    for entry in entries:
        results.append({"name": entry["_id"], "revenue": entry["sales"]})
    return results
19
+
20
+
21
def get_total_order_count(data) -> int:
    """Return the total order count from an aggregate payload.

    Fix: the original annotation claimed ``Dict[str, int]``, but the
    function has always returned the scalar stored under ``"count"``;
    the annotation is corrected to ``int``.

    Args:
        data: Aggregate response dict; reads the "count" key.

    Returns:
        The total number of orders, or 0 when the key is absent.
    """
    return data.get("count", 0)
24
+
25
+
26
def get_total_revenue_count(data) -> float:
    """Return the total revenue from an aggregate payload.

    Fix: the original annotation claimed ``Dict[str, float]``, but the
    function has always returned the scalar stored under ``"sales"``;
    the annotation is corrected to ``float``.

    Args:
        data: Aggregate response dict; reads the "sales" key.

    Returns:
        The total revenue, or 0.0 when the key is absent.
    """
    return data.get("sales", 0.0)
src/json_creation/app.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from abc import ABC,abstractmethod
2
+ from typing import List, Dict
3
+ from .archive_orders import fetch_response
4
+ from .aggregators import get_platform_order_counts, get_platform_revenue,get_total_order_count,get_total_revenue_count
5
+ from .orders import count_orders_by_day
6
+ from .order_states_timestamp import count_orders_by_state_and_day
7
+ from .items_of_orders_timestamp import count_items_ordered
8
+ from .revenue import revenue_counts_by_day
9
+ import json
10
+
11
def display_order_counts(data):
    """Print and return the per-aggregator order counts.

    Args:
        data: Aggregate payload forwarded to ``get_platform_order_counts``.

    Returns:
        ``{"number_of_order_aggregator_wise": {platform_name: order_count}}``
    """
    counts_by_platform = {}
    for row in get_platform_order_counts(data):
        counts_by_platform[row["name"]] = row["order_count"]
    print("number_of_order_aggregator_wise:")
    print(counts_by_platform)
    return {"number_of_order_aggregator_wise": counts_by_platform}
19
+
20
+
21
def display_revenue(data):
    """Print and return the per-aggregator revenue.

    Args:
        data: Aggregate payload forwarded to ``get_platform_revenue``.

    Returns:
        ``{"revenue_aggregator_wise": {platform_name: revenue}}``
    """
    revenue_by_platform = {}
    for row in get_platform_revenue(data):
        revenue_by_platform[row["name"]] = row["revenue"]
    print("revenue_aggregator_wise:")
    print(revenue_by_platform)
    return {"revenue_aggregator_wise": revenue_by_platform}
27
+
28
+
29
def display_total_order_count(data):
    """Print and return the overall order total.

    Returns:
        ``{"total_orders": <int>}``
    """
    total = get_total_order_count(data)
    print('total_orders:', total)
    return {"total_orders": total}
33
+
34
def display_total_revenue_count(data):
    """Print and return the overall revenue total.

    Returns:
        ``{"total_revenue": <float>}``
    """
    total = get_total_revenue_count(data)
    print('total_revenue:', total)
    return {"total_revenue": total}
38
+
39
def display_daywise_order_counts(data):
    """Print and return the day-wise order counts.

    Returns:
        ``{"number_of_order_daywise": <count_orders_by_day result>}``
    """
    daily_counts = count_orders_by_day(data)
    print("number_of_order_daywise:", daily_counts)
    return {"number_of_order_daywise": daily_counts}
43
+
44
def display_daywise_order_states(data):
    """Print and return day-wise order-state counts.

    Unlike the other display helpers, the helper's result is returned
    as-is (it is presumably already keyed — confirm against
    ``count_orders_by_state_and_day``).
    """
    state_counts = count_orders_by_state_and_day(data)
    print(state_counts)
    return state_counts
48
+
49
def display_items_ordered_counts(data):
    """Print and return per-item order counts.

    Returns:
        ``{"number_of_times_item_ordered": {item_title: count}}``
    """
    item_totals = count_items_ordered(data)
    print("number_of_times_item_ordered:", item_totals)
    return {"number_of_times_item_ordered": item_totals}
53
+
54
def display_daywise_revenue(data):
    """Print and return the day-wise revenue.

    Returns:
        ``{"revenue_daywise": <revenue_counts_by_day result>}``
    """
    daily_revenue = revenue_counts_by_day(data)
    print("revenue_daywise:", daily_revenue)
    return {"revenue_daywise": daily_revenue}
58
+
59
def final_counts(data):
    """Assemble every report section into one flat summary dict.

    Each ``display_*`` helper both prints its section and returns it as a
    single-key dict; the sections are merged left-to-right, so later
    sections would win on any (unexpected) key collision — same semantics
    as chaining the ``|`` operator.

    Args:
        data: Aggregate payload passed unchanged to every section builder.

    Returns:
        One dict containing every section's key/value pairs.
    """
    sections = (
        display_order_counts(data),
        display_revenue(data),
        display_total_order_count(data),
        display_total_revenue_count(data),
        display_daywise_order_counts(data),
        display_daywise_order_states(data),
        display_items_ordered_counts(data),
        display_daywise_revenue(data),
    )
    summary = {}
    for section in sections:
        summary |= section
    return summary
70
+
src/json_creation/archive_orders.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ import base64
3
+ from Crypto.Cipher import AES
4
+ from Crypto.Util.Padding import unpad, pad
5
+ from datetime import datetime, timedelta
6
+ import json
7
+ from ..utils import dev_url
8
+
9
+ JWT_SECRET_KEY = "super-secret-key"
10
+
11
def encrypt(raw):
    """AES-encrypt *raw* and return the ciphertext base64-encoded.

    Args:
        raw: Any value; it is stringified before encryption.

    Returns:
        Base64-encoded ciphertext as ``bytes``, or ``None`` on any failure.
    """
    # SECURITY NOTE(review): AES in ECB mode leaks plaintext structure, and
    # the hard-coded JWT_SECRET_KEY doubles as the cipher key. Presumably the
    # remote API mandates this exact scheme — confirm before reusing elsewhere.
    try:
        raw = str(raw)
        # Pad to a 32-byte multiple before encrypting.
        raw = pad(raw.encode(), 32)
        cipher = AES.new(JWT_SECRET_KEY.encode("utf-8"), AES.MODE_ECB)
        return base64.b64encode(cipher.encrypt(raw))
    except Exception as e:
        # Best-effort: log the error and signal failure with None
        # rather than raising to the caller.
        print(e)
        return None
20
+
21
def get_date_range(period):
    """Translate a named period into a (start, end) datetime pair.

    Args:
        period: One of 'week', 'month', 'six months', 'whole',
            'yesterday', or 'today'.

    Returns:
        Tuple ``(start_date, end_date)``. For 'yesterday' and 'today'
        both ends are the same instant; for every other period the range
        ends at the current local time.

    Raises:
        ValueError: If *period* is not one of the recognized names.
    """
    now = datetime.now()
    # Rolling-window periods: start N days back, end now.
    lookback_days = {"week": 7, "month": 30, "six months": 180}
    if period in lookback_days:
        return now - timedelta(days=lookback_days[period]), now
    if period == "whole":
        # All-time: start at the Unix epoch.
        return datetime(1970, 1, 1), now
    if period == "yesterday":
        point = now - timedelta(days=1)
        return point, point
    if period == "today":
        return now, now
    raise ValueError("Invalid period type. Choose from 'week', 'month', 'six months', 'whole', 'yesterday', or 'today'.")
42
+
43
+ def fetch_response(store_id, brand_id, period=None, start_date=None, end_date=None, hours=None, minutes=None, seconds=None):
44
+ if period:
45
+ start_date, end_date = get_date_range(period)
46
+ elif not start_date or not end_date:
47
+ raise ValueError("Start date and end date must be provided if period is not specified.")
48
+
49
+ if start_date == end_date:
50
+ if hours is not None and minutes is not None and seconds is not None:
51
+ start_date = start_date.replace(hour=hours, minute=minutes, second=seconds)
52
+ end_date = end_date.replace(hour=hours, minute=minutes, second=seconds)
53
+ else:
54
+ start_date = start_date.replace(hour=0, minute=0, second=0)
55
+ end_date = end_date.replace(hour=23, minute=59, second=59)
56
+
57
+ start_date_str = start_date.strftime("%Y-%m-%d %H:%M:%S")
58
+ end_date_str = end_date.strftime("%Y-%m-%d %H:%M:%S")
59
+
60
+ encrypted = encrypt(brand_id)
61
+ if encrypted is None:
62
+ print("Error in encryption")
63
+ return
64
+
65
+ url = f"https://dev.sahlhub.com/api/v1/order/archiveOrder?storeId={store_id}&startDate={start_date_str}&enddate={end_date_str}"
66
+
67
+ auth_token = "SAHL_COOKIE_KEY"
68
+ headers = {
69
+ "Content-Type": "application/json",
70
+ "userToken": auth_token,
71
+ "brandid": encrypted.decode("utf-8", "ignore"),
72
+ "origin": "https://pos-dev.sahlhub.com",
73
+ }
74
+
75
+ response = requests.request("GET", url, headers=headers)
76
+
77
+ if response.status_code == 200:
78
+ data = response.json()
79
+ return data
80
+ else:
81
+ print(f"Failed to fetch data. Status code: {response.status_code}")
82
+ print(response.text)
83
+ return None
src/json_creation/items_of_orders_timestamp.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import defaultdict
2
+ def count_items_ordered(data):
3
+ """
4
+ Count the number of times each item is ordered.
5
+
6
+ Args:
7
+ data (dict): A dictionary containing order data with details about items.
8
+
9
+ Returns:
10
+ dict: A dictionary with item titles as keys and their order counts as values.
11
+ """
12
+ item_count = defaultdict(int)
13
+
14
+ for order in data.get("data", []):
15
+ for item in order.get("data", [])[0].get("order", {}).get("items", []):
16
+ item_title = item.get("title", "")
17
+ quantity = item.get("quantity", 0)
18
+ if item_title:
19
+ try:
20
+ quantity = int(quantity)
21
+ item_count[item_title] += quantity
22
+ except ValueError:
23
+ print(f"Skipping item {item_title} with invalid quantity: {quantity}")
24
+ pass
25
+
26
+ return dict(item_count)
27
+
28
+
src/json_creation/json_schema.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# JSON Schema (draft-07) describing the aggregated restaurant-analytics
# payload handed to the LLM prompt. Keys under "properties" must match the
# keys produced by the aggregation pipeline.
json_schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "RestaurantOrderData",
    "type": "object",
    "properties": {
        "restaurant_name": {"type": "string", "description": "Name of the restaurant"},
        "number_of_times_item_ordered": {
            "type": "object",
            "description": "Number of times each item has been ordered. Each key is item name and value is number of times it get ordered",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of times the item was ordered",
            },
        },
        "number_of_order_daywise": {
            "type": "object",
            "description": "Number of orders received each day. Each key is date and value is number of orders",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of orders for the specific day",
            },
        },
        "number_of_order_cancelled": {
            "type": "object",
            "description": "Number of orders received each day. Each key is date and value is number of orders get canceled",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of canceled orders for the specific day",
            },
        },
        "number_of_order_completed": {
            "type": "object",
            "description": "Number of orders received each day. Each key is date and value is number of orders get completed",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of completed orders for the specific day",
            },
        },
        "number_of_order_aggregator_wise": {
            "type": "object",
            "description": "Number of orders received from each aggregator. Each key is aggregator name and value is number of orders get on that specific aggregator",
            "additionalProperties": {
                "type": "integer",
                "description": "Count of orders for the specific aggregator",
            },
        },
        "total_revenue": {"type": "number", "description": "Total revenue generated"},
        "total_orders": {"type": "integer", "description": "Total number of orders"},
        "revenue_daywise": {
            "type": "object",
            "description": "Revenue generated each day. Each key is date and value is total revenue generated on that specific date",
            "additionalProperties": {
                "type": "number",
                "description": "Revenue for the specific day",
            },
        },
    },
    # Bug fix: "required" previously listed "number_of_order_canceled"
    # (single l), which does not exist under "properties" -- any validator
    # enforcing this schema would reject every otherwise-valid payload.
    "required": [
        "restaurant_name",
        "number_of_times_item_ordered",
        "number_of_order_daywise",
        "number_of_order_cancelled",
        "number_of_order_completed",
        "number_of_order_aggregator_wise",
        "total_revenue",
        "total_orders",
        "revenue_daywise",
    ],
}
src/json_creation/mongo_integration.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from mongoengine import *
2
+ from datetime import datetime
3
+
4
# NOTE(review): database credentials are hard-coded in source control.
# Move this URI into an environment variable / secrets manager and rotate
# the exposed password.
MONGODB_URI="mongodb+srv://brijrvl:[email protected]/?retryWrites=true&w=majority&appName=orderbot"
DATABASE_NAME="analyticsbot"

# Opens the default mongoengine connection at import time; ChatManager's
# connect_to_db() repeats this call with the same alias.
connect(
    db=DATABASE_NAME,
    host=MONGODB_URI,
)
11
+
12
class ChatManager:
    """Persistence layer for WhatsApp chat sessions and messages.

    Wraps two mongoengine documents (Session, Message) and provides helpers
    to record turns and rebuild a recent chat-history string for prompting.
    """

    def __init__(self):
        # Ensures the default mongoengine connection exists (also done at
        # module import above).
        self.connect_to_db()

    def connect_to_db(self):
        # Connection to MongoDB
        connect(db=DATABASE_NAME, host=MONGODB_URI)

    class Session(Document):
        # One session document per WhatsApp phone number.
        phone_number_id = StringField(required=True, unique=True)
        chat_history = StringField(default="")
        meta = {
            'collection': 'sessions',
            'db_alias': 'default'
        }

    class Message(Document):
        # A single chat turn belonging to a Session.
        session_id = ReferenceField('ChatManager.Session', required=True)
        phone_number_id = StringField(required=True)
        role = StringField(choices=['USER', 'ASSISTANT'], required=True)
        content = StringField(required=True)
        # NOTE(review): datetime.utcnow is deprecated (3.12+) and produces a
        # naive datetime -- consider datetime.now(timezone.utc).
        timestamp = DateTimeField(default=datetime.utcnow, required=True)
        meta = {
            'collection': 'messages',
            'db_alias': 'default'
        }

    def get_or_create_session(self, phone_number):
        """Return the Session for this phone number, creating it if absent."""
        session = self.Session.objects(phone_number_id=phone_number).first()
        if not session:
            session = self.Session(phone_number_id=phone_number).save()
        return session

    def save_message(self, session_id, phone_number, role, content):
        """Persist one chat turn ('USER' or 'ASSISTANT') for a session."""
        print("Here 3")
        message = self.Message(
            session_id=session_id,
            phone_number_id=phone_number,
            role=role,
            content=content,
            timestamp=datetime.utcnow()
        )
        message.save()
        print("Here 4")
        return message

    def get_last_n_messages(self, session, n=10):
        # Retrieve the last `n` messages for the given session
        # (queryset is newest-first; callers reverse for chronology).
        messages = self.Message.objects(session_id=session.id).order_by('-timestamp')[:n]
        return messages

    def build_chat_history(self, messages):
        # Build chat history string from the retrieved messages
        # (reversed so the oldest message appears first).
        chat_history = []
        for message in messages:
            chat_history.append(f"{message.role}: {message.content}")
        return "\n".join(reversed(chat_history))

    def handle_message(self, phone_number, content):
        """Record an incoming user message and return the recent history text.

        Returns the last <=10 turns (including this one) formatted as
        "ROLE: content" lines, oldest first.
        """
        session = self.get_or_create_session(phone_number)

        self.save_message(session.id, phone_number, 'USER', content)

        last_messages = self.get_last_n_messages(session)

        # NOTE(review): get_last_n_messages already caps the result at 10,
        # so this re-slice is redundant (kept as-is).
        if len(last_messages) >= 10:
            last_three_messages = last_messages[:10]
        else:
            last_three_messages = last_messages

        str_chat_history = self.build_chat_history(last_three_messages)

        return str_chat_history
src/json_creation/ollama_llm.py ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ from fastapi import Query
3
+ import logging
4
+ from llama_index.core.agent import ReActAgent
5
+ from llama_index.core.indices.struct_store import JSONQueryEngine
6
+ from llama_index.core.llms import ChatMessage
7
+ from llama_index.core import PromptTemplate
8
+ from datetime import datetime
9
+ from .app import final_counts # noqa
10
+ from .archive_orders import fetch_response
11
+ from typing import Optional
12
+ from datetime import datetime, timedelta
13
+ from ..utils import (
14
+ llm,
15
+ ) # (this is to take llm from utils and llm in utils is called from config)
16
+ from .json_schema import json_schema
17
+ import ollama
18
+ from config import settings
19
+ import sys
20
+ import math
21
+
22
+ sys.path.append("..")
23
+
24
+
25
+ client = ollama.Client(host=settings.HOST_URI)
26
+
27
+
28
def fetch_data(
    store_id: str,
    brand_id: str,
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
):
    """
    Fetch archived orders for a store over [start_date, end_date] and
    aggregate them into the analytics JSON consumed by the prompt builder.

    Args:
        store_id: Store identifier forwarded to the archive-order API.
        brand_id: Brand identifier (encrypted into a request header downstream).
        start_date: Inclusive start date string, "YYYY-MM-DD".
        end_date: Inclusive end date string, "YYYY-MM-DD".

    Returns:
        dict: Aggregated counts from final_counts() on success, otherwise a
        {"error": <message>} dict. Never returns None.
    """
    # Bug fix: the original only handled the `start_date and end_date` branch
    # and fell through to an implicit `return None` otherwise, which crashed
    # callers doing `"error" in data` with a TypeError.
    if not (start_date and end_date):
        return {"error": "Both start_date and end_date are required in YYYY-MM-DD format."}

    try:
        start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
        end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
    except ValueError:
        return {"error": "Dates must be in YYYY-MM-DD format."}

    data = fetch_response(store_id, brand_id, None, start_date_obj, end_date_obj)

    if data:
        return final_counts(data)
    return {"error": "Failed to fetch data"}
52
+
53
+
54
def greetings_function(query):
    """Return a short, playful one-line greeting generated by the LLM.

    The user's query is lower-cased and sent alongside a fixed system
    prompt; the stripped assistant reply text is returned.
    """
    normalized = query.lower()
    system_prompt = f"""
    You are a restaurant menu bot. Your task is to greet the user with one-liner.
    Keep your response short, playful, and engaging.
    """

    chat_messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": normalized},
    ]

    reply = client.chat(model="llama3.1", messages=chat_messages)

    print("----" * 20)
    print(reply["message"])
    print("----" * 20)
    return reply["message"]["content"].strip()
72
+
73
+
74
def get_data(
    query: str,
    start_date: str,
    end_date: str,
    store_id="634fdb58ad4c218c52bfaf4f",
    brand_id="6347b5f0851f703b75b39ad0",
) -> str:
    """
    Answer a natural-language analytics question over the store's order data.

    Fetches and aggregates the order data for the date range, embeds the
    aggregate JSON (plus its schema) into a system prompt, and asks the LLM
    to answer the query against that JSON.

    Args:
        query: Natural-language question from the user.
        start_date: Range start, "YYYY-MM-DD".
        end_date: Range end, "YYYY-MM-DD".
        store_id: Store identifier (defaults to the demo store).
        brand_id: Brand identifier (defaults to the demo brand).

    Returns:
        str: The LLM's answer, or an error message string.
    """
    print(f"QUERY :: {query}")
    print(f"START DATE :: {start_date}")
    print(f"END DATE :: {end_date}")

    data = fetch_data(
        store_id=store_id,
        brand_id=brand_id,
        start_date=start_date,
        end_date=end_date,
    )

    # Guard both failure shapes: an {"error": ...} dict and a None
    # fall-through from older fetch_data behavior.
    if data is None:
        return "Failed to fetch data"
    if "error" in data:
        return data["error"]

    def create_dynamic_prompt(user_query):
        """Build the system prompt embedding the aggregated JSON and schema."""
        # Bug fix: the original f-string contained
        # `{print("awdnjznnfjldznjnz : ", a)}`, which printed debug noise and
        # interpolated the literal text "None" into the LLM prompt.
        prompt = f"""
        now if dates are 00:00:0000 that means there will no nothing to get data , just pass the queary as it in simplified way dont try to fetch the data
        The following is a task for an intelligent assistant:

        Here is the JSON with order details of a restaurant named "Wrap and Dip Cafe":
        {json.dumps(data, indent=2)}

        Given the JSON schema for reference:
        {json.dumps(json_schema, indent=2)}

        You are a JSON analysis engine designed to answer questions based on the given restaurant order data. The data includes various aspects such as the number of times each item has been ordered, the number of orders per day, cancellations, completions, orders by aggregator, total revenue, total orders, and daily revenue.

        When asked a question, follow these steps:

        1. Understand the question and identify the relevant parts of the JSON data.
        2. Extract the necessary information from the JSON data.
        3. Perform any required calculations or comparisons.
        4. Provide a concise and accurate answer without including unnecessary details.
        5. If you encounter more than one answer, provide them in a list.
        6. Provide accurate, concise, and clear answers based on the JSON data provided.
        7. I only want the response to be printed after 'So,the answer is' or 'Therefore,the answer is' and not the based on json data line.

        Special attention should be given to queries about items ordered the most. These queries require looking into "number_of_times_item_ordered" and identifying the item(s) with the highest count.

        Here are a few examples of questions you should be able to answer:

        - "Which item is ordered the most?"
        - Look into "number_of_times_item_ordered" and find the item with the highest count.

        - "On which date was the highest revenue collected?"
        - Look into "revenue_daywise" and find the date with the highest revenue.

        - "How many orders were completed on 2024-04-22?"
        - Look into "number_of_order_completed" for the value corresponding to "2024-04-22".

        - "What is the total revenue generated?"
        - Return the value from the sum of "total_revenue".

        - "How many orders were canceled on 2024-03-13?"
        - Look into "number_of_order_canceled" for the value corresponding to "2024-03-13".

        - "Find the item with exactly 3 orders."
        - Look into "number_of_times_item_ordered" and find the item(s) with a count of 3.

        - "What is the revenue for last 16 days?"
        - Look into "revenue_day_wise" and take the revenues two at a time and do the sum and then third and so on."

        Use these examples to guide your responses to similar questions. If you encounter a new type of question, use the structure and examples to determine how to extract and compute the answer.

        Remember, your goal is to provide accurate, concise, and clear answers based on the JSON data provided. Do not generate lengthy responses or include detailed breakdowns unless explicitly asked. Return only the direct answer to the question.

        The user's query is as follows: "{user_query}"
        """
        messages = [
            ChatMessage(role="system", content=prompt),
            ChatMessage(role="user", content=user_query),
        ]
        resp = llm.chat(messages)
        return resp.message.content

    return create_dynamic_prompt(query)
src/json_creation/ollama_tool_calling.py ADDED
@@ -0,0 +1,246 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .ollama_llm import get_data
2
+ from datetime import datetime
3
+
4
+ # from ..config import settings
5
+ from config import settings
6
+ import ollama
7
+ from ..utils import llm
8
+ import traceback
9
+ from .ollama_llm import greetings_function
10
+
11
+ import sys
12
+
13
+ sys.path.append("..")
14
+
15
+
16
class OllamaContextQuery:
    """Three-stage Ollama pipeline for the analytics bot.

    1. ollama_context_query: rewrite the user's query using chat history and
       decide whether the 'get-data' tool is needed.
    2. ollama_tool_call: let the model pick date parameters and invoke the
       get_data tool.
    3. summarised_output: condense the tool response into a customer-facing
       answer.
    """

    def __init__(self) -> None:
        # self.client=ollama.Client(host=settings.HOST_URI)
        self.client = ollama.Client(host=settings.HOST_URI)

    def ollama_context_query(self, chat_history, user_query):
        """Contextualize `user_query` against `chat_history`.

        Returns:
            tuple[str, bool]: (rewritten/contextual response, True when the
            model's response mentions the 'get-data' tool and a tool call
            should follow).
        """
        print(user_query)
        prompt = (
            "You are a Contextualization engine for Restaurant Order Analytics. Your job is to rewrite the user's natural language query so it is optimized for tool processing without altering its original meaning utilizing given chat history.\n"
            "IMPORTANT: You have below tool available:\n"
            # "- 'greet tool': For queries related to greetings or conversational starters.\n"
            "- 'get-data': For queries related to restaurant order data, top-selling items, or date-related information.\n"
            "GUIDELINES:\n"
            "- If the query involves any tool utilization, ensure you return tool name in response nothing else.\n"
            "- If the query don't involves any tool utilization, respond naturally and ask to try diffrent question in regards to know about their restaturant analytics and don't mention any tool name nothing else.\n"
            "- Convert any date information to the format YYYY-MM-DD.\n"
            "- If someone asks about you (e.g., 'Who are you?'), respond with: 'I am SAHL analytics bot.'\n"
            "- Include previous chat history between the user and assistant to ensure continuity in the conversation.\n"
            "- Make minimal changes to the query. If no changes are needed, return the query as is.\n"
            "- Ensure that the meaning and context of the query are preserved exactly.\n"
            f"NOTE: The current date and time is: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
            f"PREVIOUS CHAT HISTORY of a USER : {chat_history}"
            "Use above chat history to generate context query, don't include in response."
        )
        system_msg = {"role": "system", "content": prompt}
        user_msg = {"role": "user", "content": user_query}
        response = self.client.chat(
            model="llama3.1",
            messages=[system_msg, user_msg],
        )
        context_query = response["message"]["content"]
        print("CONTEXT .... ")
        print(context_query)
        # Substring check: any mention of 'get-data' in the model output
        # flags that a tool call should be attempted next.
        if "get-data" in context_query:
            return context_query, True

        return context_query, False

    def ollama_tool_call(self, context_query):
        """Run the tool-calling step for a contextualized query.

        The system prompt teaches the model how to derive start/end dates
        (today/yesterday/specific date/last week/month/X days) before it
        invokes the `get_data` function tool.

        NOTE(review): return arity is inconsistent -- most paths return a
        (value, bool) tuple, but the unknown-tool branch and the exception
        handler return a bare string. Callers that unpack two values will
        misbehave on those paths.
        """
        print(context_query)
        try:
            prompt = (
                "You are a Tool Calling engine. Assume queries are related to Restaurant Order Analytics.\n"
                f'Note that the current date and time is: {datetime.now().strftime("%Y-%m-%d")}\n'
                "if There is greetings or anything like that give responce according to that like in natural language chat according to it.\n"
                "IMPORTANT RULES:\n"
                "- If the query is about 'today,' the start date should be today's date, and the end date should be tomorrow's date.\n"
                "- If the query is about 'yesterday,' the start date should be the date of the day before yesterday, and the end date should be yesterday's date.\n"
                "- If the query specifies a particular date, set the start date to that date and the end date to the day after that date (e.g., for 2024-07-23, start date: 2024-07-23, end date: 2024-07-24).\n"
                "- For 'last week,' set the start date to 7 days before today, and the end date will be today's date.\n"
                "- For 'last month,' set the start date to the same day of the previous month (or the 1st of the previous month if today is the 1st), and the end date to today's date.\n"
                "-STRICTLY:(IMPORTANT) For 'last X days' (e.g., last 5 days), set the start date to X days before today, and the end date to today's date.\n"
                "- Always ensure dates are in the yyyy-mm-dd format.\n"
                "STRICTLY:\n"
                "- For any range, the end date must be the day after the last date in the range. For example, for the range 2024-07-20 to 2024-07-23, the start date is 2024-07-20, and the end date is 2024-07-24.\n"
                "IMPORTANT: You need to take the same as the Contextual query you have been provided cause it is already optimized, no need to change the query. Your main work is to choose the start date and end date."
                "IMPORTANT : Few Examples : Lets Say Today's date is 2024-10-09"
                "EXAMPLES:\n"
                "Example 1: Query about today\n"
                'User Query: "What are the total orders for today?"\n'
                'Model Response: "For today, the start date is 2024-10-09, and the end date is 2024-10-10."\n\n'

                "Example 2: Query about yesterday\n"
                'User Query: "What was the revenue yesterday? or "\n'
                'Model Response: "For yesterday, the start date is 2024-10-08, and the end date is 2024-10-09."\n\n'

                "Example 3: Query about a specific date\n"
                'User Query: "Show me the orders for 2024-07-23."\n'
                'Model Response: "For the specific date 2024-07-23, the start date is 2024-07-23, and the end date is 2024-07-24."\n\n'

                "Example 4: Query about the last week\n"
                'User Query: "What were the best-selling items last week?"\n'
                'Model Response: "For last week, the start date is 2024-10-03, and the end date is 2024-10-09."\n\n'

                "Example 5: Query about the last month\n"
                'User Query: "What was the total revenue for last month?"\n'
                'Model Response: "For last month, the start date is 2024-09-01, and the end date is 2024-10-01."\n\n'

                "Example 6: Query about the last X days\n"
                'User Query: "what is revenue last 7 days."\n'
                'Model Response: "For the last 7 days, the start date is 2024-10-03, and the end date is 2024-10-09."\n\n'
                'User Query: "what is revenue last 2 days."\n'
                'Model Response: "For the last 2 days, the start date is 2024-10-07, and the end date is 2024-10-09."\n\n'
                'User Query: "what is revenue last 5 days."\n'
                'Model Response: "For the last 5 days, the start date is 2024-10-04, and the end date is 2024-10-09."\n\n'

                "MOST_IMPORTANT : Query about the last X days : take the start date , the x days before todays's date and the end date should be today's Date , as simple as that no need to overcook anything"

                f"IMPORTANT : Note that the Today's date : {datetime.now().strftime('%Y-%m-%d')}\n"
                "you have today's date take the start and end date according ly through the examples."

            )

            messages = [{"role": "system", "content": prompt}]
            messages.append({"role": "user", "content": context_query})

            # Single function tool exposed to the model; the greetings tool
            # below is currently disabled.
            response = self.client.chat(
                model="llama3.1",
                messages=messages,
                tools=[
                    {
                        "type": "function",
                        "function": {
                            "name": "get_data",
                            "description": "Get Restaurant Order Information with detailed with natural language query",
                            "parameters": {
                                "type": "object",
                                "properties": {
                                    "query": {
                                        "type": "string",
                                        "description": "natural language query string to be processed",
                                    },
                                    "start_date": {
                                        "type": "string",
                                        "description": "Start date in YYYY-MM-DD format",
                                    },
                                    "end_date": {
                                        "type": "string",
                                        "description": "End date in YYYY-MM-DD format",
                                    },
                                },
                                "required": ["query"],
                            },
                        },
                    }
                    # {
                    #     "type": "function",
                    #     "function": {
                    #         "name": "greetings_function",
                    #         "description": "Returns an appropriate greeting message based on the user's query",
                    #         "parameters": {
                    #             "type": "object",
                    #             "properties": {
                    #                 "query": {
                    #                     "type": "string",
                    #                     "description": "The user's query to determine the appropriate greeting",
                    #                 },
                    #             },
                    #             "required": ["query"],
                    #         },
                    #     },
                    # },
                ],
            )

            messages.append(response["message"])
            tool_calls = response["message"].get("tool_calls", [])

            if tool_calls:
                tool_name = tool_calls[0].get("function").get("name")
                # if tool_name == "greetings_function":
                #     return self.handle_greetings(response)
                if tool_name == "get_data":
                    return self.handle_get_data(tool_calls)
                else:
                    # NOTE(review): bare string return (no bool) on this path.
                    return response["message"]["content"]
            else:
                return response["message"]["content"], False

        except Exception as e:
            print(f"Error: {e}")
            # NOTE(review): bare string return (no bool) on the error path.
            return "There was an error processing the query."

    def handle_greetings(self, response):
        """Execute the greetings tool from a model response.

        Currently unused (the greetings tool is commented out above).
        Returns (greeting_message, True).
        """
        tool_args = (
            response["message"]["tool_calls"][0].get("function").get("arguments")
        )
        greeting_message = greetings_function(tool_args["query"])
        print("**** GREETING MESSAGE ****")
        print(greeting_message)
        print("**************************")
        return greeting_message, True

    def handle_get_data(self, tool_calls):
        """Invoke get_data for each requested tool call.

        Returns (last_function_response, False). NOTE(review): with multiple
        tool calls only the final response survives because the return sits
        inside the loop body -- effectively only the first call runs.
        """
        available_functions = {"get_data": get_data}
        for tool in tool_calls:
            function_to_call = available_functions[tool["function"]["name"]]
            function_args = tool["function"]["arguments"]
            function_response = function_to_call(**function_args)
            print(
                f"**** DATA RESPONSE ****\n{function_response}\n***********************"
            )
            return function_response, False

    def summarised_output(self, messages, chat_history, context_query, user_query):
        """Condense the tool response into a short, customer-facing answer.

        Args:
            messages: The tool-call result text to summarize.
            chat_history: Prior conversation turns (context only).
            context_query: Output of ollama_context_query.
            user_query: The user's original question.

        Returns:
            str: The summarized answer text from the model.
        """
        prompt = (
            "You are a Summarised responses engine. Assume queries are related to Restaurant Order Analytics.\n"
            "Your task is to provide relevant information about user query, in prompt you will get all the information about user's chat history and all the information provided by the tool calling expert.\n"
            "Use only provided information Don't use any extra information from your end.\n"
            f'Note that the current date and time is: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}\n'
            "Merge the sentence in simplified way , dont over cook anything just a basic merger"
            "always show the numbers if provided."
            "Never Compare."
            "Merge the sentence in a simplified way, without adding unnecessary context."
            "if the answer is too long , without junk than provide the full answer in simplified way , and provide in the list"
            "Remove any phrases like 'Based on the JSON schema...' or 'Here is the relevant information in simplified form."
            "Provide the final result directly, formatted for customer readability."
            "If data not found , dont use json data word , just decline that perticular question. there is none"
            "Ignore unnecessary lines which does not need to show in answer , cause we need to show this to customer so just give the relevent information, no need to use any extra lines EXAMPLE:{top seeling item is this with order count of this.} This is how a simplified version looks like"
            f"PREVIOUS CHAT HISTORY: {chat_history} , never include this in response"
            f"If the question is new than no need to consider {chat_history}"
            f"Current Tool responce: {messages}"
            f"Contextualization query: {context_query}"
            "IMPORTANT : Here i am providing user_query so you can answer according to that."
            "Example 1 :"
            "user_query : What is the last day's revenue?"
            "expected_response" "Last day's revenue is $1,200."
            "Example 2 :"
            "user_query : What was the top-selling item yesterday?"
            "expected_response" "The top-selling item yesterday was Margherita Pizza with 120 orders."
            "Example 3 :"
            "user_query : How many orders were placed today?"
            "expected_response" "There were 350 orders placed today."
            "Exaple 4 :"
            "user_query : What is the revenue for the last 7 days?"
            "expected_response" "Revenue for the last 7 days is $8,500."
            "use this user queary to generate response according to the user query , if last day asked must use last day same for week , last 7 days , last 15 days , what ever asked just use that and give answer after : this"
            f"user_query : {user_query}"
            "STRICT : Don't use 'However, I didn't use any tool this time or Try a different question regarding restaurant analytics.' this type of any suggetions or anything"
        )
        print("user_query : ",user_query)
        print("messages : ",messages)

        system_msg = {"role": "system", "content": prompt}
        user_msg = {"role": "user", "content": user_query}
        response = self.client.chat(
            model="llama3.1",
            messages=[system_msg, user_msg],
        )
        context_query = response["message"]["content"]
        return context_query
src/json_creation/order_states_timestamp.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import defaultdict
2
+ from datetime import datetime
3
+
4
def count_orders_by_state_and_day(data):
    """
    Count orders per state per day, keeping only each order's final state
    for a given day.

    Args:
        data (dict): Payload whose "data" list holds orders; each order's
            nested "data" list holds state-change records with
            "order_state", "timestamp_unix" (ms) and "order_id".

    Returns:
        dict: {"number_of_order_<state>": {"YYYY-MM-DD": count, ...}, ...}
    """
    # Later records for the same (order, day) overwrite earlier ones, so
    # this map ends up holding each order's last observed state per day.
    latest_state = {}
    for order in data.get("data", []):
        for record in order.get("data", []):
            state = record.get("order_state")
            ts_ms = record.get("timestamp_unix")
            order_id = record.get("order_id")
            if state and ts_ms and order_id:
                day = datetime.fromtimestamp(ts_ms / 1000).date()
                latest_state[(order_id, day)] = state

    # Tally the surviving states into the "number_of_order_<state>" buckets.
    result = {}
    for (order_id, day), state in latest_state.items():
        bucket = result.setdefault(f"number_of_order_{state.lower()}", {})
        day_key = str(day)
        bucket[day_key] = bucket.get(day_key, 0) + 1

    return result
src/json_creation/orders.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
+
4
def count_orders_by_day(data):
    """
    Count the number of orders per calendar day.

    Args:
        data (dict): Payload whose "data" list holds orders with an
            "orderCreatedAt" epoch timestamp in milliseconds. Orders with a
            missing/zero timestamp are skipped.

    Returns:
        dict: Mapping of "YYYY-MM-DD" -> order count.
    """
    order_count_by_day = {}

    for order in data.get("data", []):
        timestamp = order.get("orderCreatedAt")
        if timestamp:
            # Idiom cleanup: format the date once and use dict.get instead
            # of an `in ... .keys()` membership test plus if/else.
            day = datetime.fromtimestamp(timestamp / 1000).strftime("%Y-%m-%d")
            order_count_by_day[day] = order_count_by_day.get(day, 0) + 1

    return order_count_by_day
src/json_creation/revenue.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
def revenue_counts_by_day(data):
    """
    Sum order revenue ("order_total") per calendar day.

    Args:
        data (dict): Payload whose "data" list holds orders with an
            "orderCreatedAt" epoch timestamp in milliseconds and a nested
            detail record at data[0].order.details.order_total.

    Returns:
        dict: Mapping of "YYYY-MM-DD" -> total revenue (float). Orders
        without a timestamp are skipped; missing totals count as 0.0.
    """
    revenue_by_day = {}

    for rev in data.get("data", []):
        order_created_at = rev.get('orderCreatedAt')
        # Bug fix: the original divided None by 1000 (TypeError) whenever
        # "orderCreatedAt" was absent, unlike the sibling counters which
        # guard the timestamp.
        if not order_created_at:
            continue
        date_str = datetime.fromtimestamp(order_created_at / 1000).strftime('%Y-%m-%d')

        # Bug fix: safe navigation replaces rev['data'][0][...] chains that
        # raised KeyError/IndexError on orders without detail records.
        details = rev.get('data') or []
        order_subtotal = None
        if details:
            order_subtotal = details[0].get('order', {}).get('details', {}).get('order_total')
        amount = float(order_subtotal) if order_subtotal is not None else 0.0

        revenue_by_day[date_str] = revenue_by_day.get(date_str, 0.0) + amount

    return revenue_by_day
21
+
22
+
src/utils.py ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pymongo import MongoClient
2
+ import sys
3
+ from config import settings
4
+ import ollama
5
+ from llama_index.llms.ollama import Ollama
6
+
7
+ sys.path.append("..")
8
+
9
# Shared singletons created once at import time and reused across modules.
client = MongoClient(settings.MONGODB_URI)
db = client[settings.DB_NAME]
# Raw ollama client (separate from the llama-index wrapper `llm` below).
client2 = ollama.Client(host=settings.HOST_URI)
model_name = settings.MODEL_NAME
# llama-index LLM wrapper; temperature 0 keeps analytics answers deterministic.
llm = Ollama(
    model="llama3.1", request_timeout=120, base_url=settings.HOST_URI, temperature=0
)
dev_url = settings.DEV_URI
# MongoDB collections used by the analytics pipeline.
orders_collection = db["orders"]
stores_collection = db["stores"]
brands_collection = db["brands"]
@@ -0,0 +1,2 @@
 
 
 
1
+ from .gathering_data import gathering_data # noqa
2
+ from .sending_message import normal_message # noqa
src/whatsapp_integration/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (276 Bytes). View file
 
src/whatsapp_integration/__pycache__/gathering_data.cpython-312.pyc ADDED
Binary file (1.21 kB). View file
 
src/whatsapp_integration/__pycache__/sending_message.cpython-312.pyc ADDED
Binary file (1.13 kB). View file
 
src/whatsapp_integration/gathering_data.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
def gathering_data(res):
    """
    Extract (phone_number, message_text) from a WhatsApp webhook payload.

    Tries, in order: plain text body, button text, then location payload.
    Text that parses as an integer is normalized through int() (e.g. "007"
    becomes "7"), matching the original behavior.

    Args:
        res (dict): Raw webhook JSON from the Meta Graph API.

    Returns:
        tuple[str, str] | None: (sender phone number, message text), or None
        when the payload carries no extractable message (e.g. status-only
        events).
    """
    # Bug fix: the original caught BaseException, which also swallows
    # KeyboardInterrupt/SystemExit; narrow to the lookup errors that can
    # actually occur, and return None (instead of raising) when no known
    # message shape is present.
    try:
        message = res["entry"][0]["changes"][0]["value"]["messages"][0]
        phone_number = message["from"]
    except (KeyError, IndexError, TypeError):
        return None

    try:
        text = message["text"]["body"]
        try:
            text = int(text)
        except ValueError:
            pass
    except (KeyError, TypeError):
        try:
            text = message["button"]["text"]
        except (KeyError, TypeError):
            try:
                text = message["location"]
            except (KeyError, TypeError):
                return None

    return phone_number, str(text)
src/whatsapp_integration/sending_message.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from config import settings
2
+ import requests
3
+ import sys
4
+
5
+ sys.path.append("..")
6
+
7
+
8
def normal_message(message, number):
    """Send a plain WhatsApp text message via the Meta Graph API.

    Args:
        message: Body text to deliver.
        number: Recipient phone number.
    """
    print("Phone number id for sending message ::::::::::::::::: ", settings.PHONE_NUMBER_ID)
    payload = {
        "messaging_product": "whatsapp",
        "to": number,
        "text": {"body": message},
    }
    print(f"DATA ::: {payload}")
    api_url = f"https://graph.facebook.com/v20.0/{settings.PHONE_NUMBER_ID}/messages"
    result = requests.post(
        api_url,
        headers={"Authorization": settings.BEARER_TOKEN},
        json=payload,
    )

    print(result.text)