redfernstech committed on
Commit
b84746b
·
verified ·
1 Parent(s): 81878b1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +345 -135
app.py CHANGED
@@ -1,77 +1,295 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import os
2
  import time
3
- from fastapi import FastAPI,Request
4
- from fastapi.responses import HTMLResponse
5
  from fastapi.staticfiles import StaticFiles
6
  from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
7
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
8
  from pydantic import BaseModel
9
- from fastapi.responses import JSONResponse
10
- import uuid # for generating unique IDs
11
- import datetime
12
  from fastapi.middleware.cors import CORSMiddleware
13
  from fastapi.templating import Jinja2Templates
14
  from huggingface_hub import InferenceClient
15
  import json
16
- import re
17
- from gradio_client import Client
18
  from simple_salesforce import Salesforce, SalesforceLogin
19
  from llama_index.llms.huggingface import HuggingFaceLLM
20
- # from llama_index.llms.huggingface import HuggingFaceInferenceAPI
21
-
22
 
23
- # Define Pydantic model for incoming request body
24
  class MessageRequest(BaseModel):
25
  message: str
26
- repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"
27
- llm_client = InferenceClient(
28
- model=repo_id,
29
- token=os.getenv("HF_TOKEN"),
30
- )
31
-
32
 
33
- os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")
34
- username = os.getenv("username")
35
- password = os.getenv("password")
36
- security_token = os.getenv("security_token")
37
- domain = os.getenv("domain")# Using sandbox environment
38
- session_id, sf_instance = SalesforceLogin(username=username, password=password, security_token=security_token, domain=domain)
39
 
40
- # Create Salesforce object
41
- sf = Salesforce(instance=sf_instance, session_id=session_id)
 
 
 
 
 
 
 
 
 
42
 
 
43
  app = FastAPI()
44
 
45
-
46
  @app.middleware("http")
47
  async def add_security_headers(request: Request, call_next):
48
  response = await call_next(request)
49
- response.headers["Content-Security-Policy"] = "frame-ancestors *; frame-src *; object-src *;"
50
- response.headers["X-Frame-Options"] = "ALLOWALL"
 
 
 
 
51
  return response
52
 
53
-
54
- # Allow CORS requests from any domain
55
  app.add_middleware(
56
  CORSMiddleware,
57
- allow_origins=["*"],
58
  allow_credentials=True,
59
  allow_methods=["*"],
60
  allow_headers=["*"],
61
  )
62
 
63
-
64
-
65
-
66
- @app.get("/favicon.ico")
67
- async def favicon():
68
- return HTMLResponse("") # or serve a real favicon if you have one
69
-
70
-
71
  app.mount("/static", StaticFiles(directory="static"), name="static")
72
-
73
  templates = Jinja2Templates(directory="static")
74
- # Configure Llama index settings
 
75
  Settings.llm = HuggingFaceLLM(
76
  model_name="meta-llama/Meta-Llama-3-8B-Instruct",
77
  tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
@@ -85,140 +303,132 @@ Settings.embed_model = HuggingFaceEmbedding(
85
  model_name="BAAI/bge-small-en-v1.5"
86
  )
87
 
 
88
  PERSIST_DIR = "db"
89
- PDF_DIRECTORY = 'data'
90
 
91
- # Ensure directories exist
92
  os.makedirs(PDF_DIRECTORY, exist_ok=True)
93
  os.makedirs(PERSIST_DIR, exist_ok=True)
 
 
94
  chat_history = []
95
  current_chat_history = []
 
96
  def data_ingestion_from_directory():
97
- documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
98
- storage_context = StorageContext.from_defaults()
99
- index = VectorStoreIndex.from_documents(documents)
100
- index.storage_context.persist(persist_dir=PERSIST_DIR)
 
 
 
101
 
102
  def initialize():
103
  start_time = time.time()
104
- data_ingestion_from_directory() # Process PDF ingestion at startup
105
- print(f"Data ingestion time: {time.time() - start_time} seconds")
106
- def split_name(full_name):
107
- # Split the name by spaces
108
  words = full_name.strip().split()
109
-
110
- # Logic for determining first name and last name
111
  if len(words) == 1:
112
- first_name = ''
113
- last_name = words[0]
114
  elif len(words) == 2:
115
- first_name = words[0]
116
- last_name = words[1]
117
- else:
118
- first_name = words[0]
119
- last_name = ' '.join(words[1:])
120
-
121
- return first_name, last_name
122
 
123
- initialize() # Run initialization tasks
 
124
 
125
-
126
- def handle_query(query):
127
  chat_text_qa_msgs = [
128
  (
129
  "user",
130
  """
131
- You are the Clara Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. Give response within 10-15 words only
132
  {context_str}
133
- Question:
134
- {query_str}
135
  """
136
  )
137
  ]
138
  text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
139
 
140
- storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
141
- index = load_index_from_storage(storage_context)
142
- context_str = ""
143
- for past_query, response in reversed(current_chat_history):
144
- if past_query.strip():
145
- context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
146
 
147
-
148
- query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
149
- answer = query_engine.query(query)
150
-
151
- if hasattr(answer, 'response'):
152
- response=answer.response
153
- elif isinstance(answer, dict) and 'response' in answer:
154
- response =answer['response']
155
- else:
156
- response ="Sorry, I couldn't find an answer."
157
- current_chat_history.append((query, response))
158
- return response
159
  @app.get("/ch/{id}", response_class=HTMLResponse)
160
  async def load_chat(request: Request, id: str):
161
  return templates.TemplateResponse("index.html", {"request": request, "user_id": id})
162
- # Route to save chat history
163
  @app.post("/hist/")
164
  async def save_chat_history(history: dict):
165
- # Check if 'userId' is present in the incoming dictionary
166
  user_id = history.get('userId')
167
- print(user_id)
168
-
169
- # Ensure user_id is defined before proceeding
170
- if user_id is None:
171
- return {"error": "userId is required"}, 400
172
-
173
- # Construct the chat history string
174
- hist = ''.join([f"'{entry['sender']}: {entry['message']}'\n" for entry in history['history']])
175
- hist = "You are a Redfernstech summarize model. Your aim is to use this conversation to identify user interests solely based on that conversation: " + hist
176
- print(hist)
177
-
178
- # Get the summarized result from the client model
179
- result = hist
180
 
181
  try:
182
- sf.Lead.update(user_id, {'Description': result})
 
 
 
 
183
  except Exception as e:
184
- return {"error": f"Failed to update lead: {str(e)}"}, 500
185
-
186
- return {"summary": result, "message": "Chat history saved"}
187
  @app.post("/webhook")
188
  async def receive_form_data(request: Request):
189
- form_data = await request.json()
190
- # Log in to Salesforce
191
- first_name, last_name = split_name(form_data['name'])
192
- data = {
193
- 'FirstName': first_name,
194
- 'LastName': last_name,
195
- 'Description': 'hii', # Static description
196
- 'Company': form_data['company'], # Assuming company is available in form_data
197
- 'Phone': form_data['phone'].strip(), # Phone from form data
198
- 'Email': form_data['email'], # Email from form data
199
- }
200
- a=sf.Lead.create(data)
201
- # Generate a unique ID (for tracking user)
202
- unique_id = a['id']
203
-
204
- # Here you can do something with form_data like saving it to a database
205
- print("Received form data:", form_data)
206
-
207
- # Send back the unique id to the frontend
208
- return JSONResponse({"id": unique_id})
209
 
210
  @app.post("/chat/")
211
  async def chat(request: MessageRequest):
212
- message = request.message # Access the message from the request body
213
- response = handle_query(message) # Process the message
214
- message_data = {
215
- "sender": "User",
216
- "message": message,
217
- "response": response,
218
- "timestamp": datetime.datetime.now().isoformat()
219
- }
220
- chat_history.append(message_data)
221
- return {"response": response}
 
 
 
222
  @app.get("/")
223
  def read_root():
224
- return {"message": "Welcome to the API"}
 
1
+ # import os
2
+ # import time
3
+ # from fastapi import FastAPI,Request
4
+ # from fastapi.responses import HTMLResponse
5
+ # from fastapi.staticfiles import StaticFiles
6
+ # from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
7
+ # from llama_index.embeddings.huggingface import HuggingFaceEmbedding
8
+ # from pydantic import BaseModel
9
+ # from fastapi.responses import JSONResponse
10
+ # import uuid # for generating unique IDs
11
+ # import datetime
12
+ # from fastapi.middleware.cors import CORSMiddleware
13
+ # from fastapi.templating import Jinja2Templates
14
+ # from huggingface_hub import InferenceClient
15
+ # import json
16
+ # import re
17
+ # from gradio_client import Client
18
+ # from simple_salesforce import Salesforce, SalesforceLogin
19
+ # from llama_index.llms.huggingface import HuggingFaceLLM
20
+ # # from llama_index.llms.huggingface import HuggingFaceInferenceAPI
21
+
22
+
23
+ # # Define Pydantic model for incoming request body
24
+ # class MessageRequest(BaseModel):
25
+ # message: str
26
+ # repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"
27
+ # llm_client = InferenceClient(
28
+ # model=repo_id,
29
+ # token=os.getenv("HF_TOKEN"),
30
+ # )
31
+
32
+
33
+ # os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")
34
+ # username = os.getenv("username")
35
+ # password = os.getenv("password")
36
+ # security_token = os.getenv("security_token")
37
+ # domain = os.getenv("domain")# Using sandbox environment
38
+ # session_id, sf_instance = SalesforceLogin(username=username, password=password, security_token=security_token, domain=domain)
39
+
40
+ # # Create Salesforce object
41
+ # sf = Salesforce(instance=sf_instance, session_id=session_id)
42
+
43
+ # app = FastAPI()
44
+
45
+
46
+ # @app.middleware("http")
47
+ # async def add_security_headers(request: Request, call_next):
48
+ # response = await call_next(request)
49
+ # response.headers["Content-Security-Policy"] = "frame-ancestors *; frame-src *; object-src *;"
50
+ # response.headers["X-Frame-Options"] = "ALLOWALL"
51
+ # return response
52
+
53
+
54
+ # # Allow CORS requests from any domain
55
+ # app.add_middleware(
56
+ # CORSMiddleware,
57
+ # allow_origins=["*"],
58
+ # allow_credentials=True,
59
+ # allow_methods=["*"],
60
+ # allow_headers=["*"],
61
+ # )
62
+
63
+
64
+
65
+
66
+ # @app.get("/favicon.ico")
67
+ # async def favicon():
68
+ # return HTMLResponse("") # or serve a real favicon if you have one
69
+
70
+
71
+ # app.mount("/static", StaticFiles(directory="static"), name="static")
72
+
73
+ # templates = Jinja2Templates(directory="static")
74
+ # # Configure Llama index settings
75
+ # Settings.llm = HuggingFaceLLM(
76
+ # model_name="meta-llama/Meta-Llama-3-8B-Instruct",
77
+ # tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
78
+ # context_window=3000,
79
+ # token=os.getenv("HF_TOKEN"),
80
+ # max_new_tokens=512,
81
+ # generate_kwargs={"temperature": 0.1},
82
+ # )
83
+
84
+ # Settings.embed_model = HuggingFaceEmbedding(
85
+ # model_name="BAAI/bge-small-en-v1.5"
86
+ # )
87
+
88
+ # PERSIST_DIR = "db"
89
+ # PDF_DIRECTORY = 'data'
90
+
91
+ # # Ensure directories exist
92
+ # os.makedirs(PDF_DIRECTORY, exist_ok=True)
93
+ # os.makedirs(PERSIST_DIR, exist_ok=True)
94
+ # chat_history = []
95
+ # current_chat_history = []
96
+ # def data_ingestion_from_directory():
97
+ # documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
98
+ # storage_context = StorageContext.from_defaults()
99
+ # index = VectorStoreIndex.from_documents(documents)
100
+ # index.storage_context.persist(persist_dir=PERSIST_DIR)
101
+
102
+ # def initialize():
103
+ # start_time = time.time()
104
+ # data_ingestion_from_directory() # Process PDF ingestion at startup
105
+ # print(f"Data ingestion time: {time.time() - start_time} seconds")
106
+ # def split_name(full_name):
107
+ # # Split the name by spaces
108
+ # words = full_name.strip().split()
109
+
110
+ # # Logic for determining first name and last name
111
+ # if len(words) == 1:
112
+ # first_name = ''
113
+ # last_name = words[0]
114
+ # elif len(words) == 2:
115
+ # first_name = words[0]
116
+ # last_name = words[1]
117
+ # else:
118
+ # first_name = words[0]
119
+ # last_name = ' '.join(words[1:])
120
+
121
+ # return first_name, last_name
122
+
123
+ # initialize() # Run initialization tasks
124
+
125
+
126
+ # def handle_query(query):
127
+ # chat_text_qa_msgs = [
128
+ # (
129
+ # "user",
130
+ # """
131
+ # You are the Clara Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. Give response within 10-15 words only
132
+ # {context_str}
133
+ # Question:
134
+ # {query_str}
135
+ # """
136
+ # )
137
+ # ]
138
+ # text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
139
+
140
+ # storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
141
+ # index = load_index_from_storage(storage_context)
142
+ # context_str = ""
143
+ # for past_query, response in reversed(current_chat_history):
144
+ # if past_query.strip():
145
+ # context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
146
+
147
+
148
+ # query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
149
+ # answer = query_engine.query(query)
150
+
151
+ # if hasattr(answer, 'response'):
152
+ # response=answer.response
153
+ # elif isinstance(answer, dict) and 'response' in answer:
154
+ # response =answer['response']
155
+ # else:
156
+ # response ="Sorry, I couldn't find an answer."
157
+ # current_chat_history.append((query, response))
158
+ # return response
159
+ # @app.get("/ch/{id}", response_class=HTMLResponse)
160
+ # async def load_chat(request: Request, id: str):
161
+ # return templates.TemplateResponse("index.html", {"request": request, "user_id": id})
162
+ # # Route to save chat history
163
+ # @app.post("/hist/")
164
+ # async def save_chat_history(history: dict):
165
+ # # Check if 'userId' is present in the incoming dictionary
166
+ # user_id = history.get('userId')
167
+ # print(user_id)
168
+
169
+ # # Ensure user_id is defined before proceeding
170
+ # if user_id is None:
171
+ # return {"error": "userId is required"}, 400
172
+
173
+ # # Construct the chat history string
174
+ # hist = ''.join([f"'{entry['sender']}: {entry['message']}'\n" for entry in history['history']])
175
+ # hist = "You are a Redfernstech summarize model. Your aim is to use this conversation to identify user interests solely based on that conversation: " + hist
176
+ # print(hist)
177
+
178
+ # # Get the summarized result from the client model
179
+ # result = hist
180
+
181
+ # try:
182
+ # sf.Lead.update(user_id, {'Description': result})
183
+ # except Exception as e:
184
+ # return {"error": f"Failed to update lead: {str(e)}"}, 500
185
+
186
+ # return {"summary": result, "message": "Chat history saved"}
187
+ # @app.post("/webhook")
188
+ # async def receive_form_data(request: Request):
189
+ # form_data = await request.json()
190
+ # # Log in to Salesforce
191
+ # first_name, last_name = split_name(form_data['name'])
192
+ # data = {
193
+ # 'FirstName': first_name,
194
+ # 'LastName': last_name,
195
+ # 'Description': 'hii', # Static description
196
+ # 'Company': form_data['company'], # Assuming company is available in form_data
197
+ # 'Phone': form_data['phone'].strip(), # Phone from form data
198
+ # 'Email': form_data['email'], # Email from form data
199
+ # }
200
+ # a=sf.Lead.create(data)
201
+ # # Generate a unique ID (for tracking user)
202
+ # unique_id = a['id']
203
+
204
+ # # Here you can do something with form_data like saving it to a database
205
+ # print("Received form data:", form_data)
206
+
207
+ # # Send back the unique id to the frontend
208
+ # return JSONResponse({"id": unique_id})
209
+
210
+ # @app.post("/chat/")
211
+ # async def chat(request: MessageRequest):
212
+ # message = request.message # Access the message from the request body
213
+ # response = handle_query(message) # Process the message
214
+ # message_data = {
215
+ # "sender": "User",
216
+ # "message": message,
217
+ # "response": response,
218
+ # "timestamp": datetime.datetime.now().isoformat()
219
+ # }
220
+ # chat_history.append(message_data)
221
+ # return {"response": response}
222
+ # @app.get("/")
223
+ # def read_root():
224
+ # return {"message": "Welcome to the API"}
225
+
226
  import os
227
  import time
228
+ from fastapi import FastAPI, Request, HTTPException
229
+ from fastapi.responses import HTMLResponse, JSONResponse
230
  from fastapi.staticfiles import StaticFiles
231
  from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
232
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
233
  from pydantic import BaseModel
 
 
 
234
  from fastapi.middleware.cors import CORSMiddleware
235
  from fastapi.templating import Jinja2Templates
236
  from huggingface_hub import InferenceClient
237
  import json
238
+ import datetime
 
239
  from simple_salesforce import Salesforce, SalesforceLogin
240
  from llama_index.llms.huggingface import HuggingFaceLLM
 
 
241
 
242
# Pydantic model for request body
class MessageRequest(BaseModel):
    """Request body schema for POST /chat/."""
    message: str  # the user's chat message to answer
 
 
 
 
 
 
245
 
246
# Fail fast at startup: abort if any required credential is absent
# from the process environment.
required_env_vars = ["HF_TOKEN", "username", "password", "security_token", "domain"]
for var in required_env_vars:
    if os.getenv(var):
        continue
    raise EnvironmentError(f"Missing required environment variable: {var}")
 
251
 
252
# Salesforce configuration: authenticate once at import time and keep a
# single shared client (`sf`) for all request handlers.
try:
    session_id, sf_instance = SalesforceLogin(
        username=os.getenv("username"),
        password=os.getenv("password"),
        security_token=os.getenv("security_token"),
        domain=os.getenv("domain"),
    )
    sf = Salesforce(instance=sf_instance, session_id=session_id)
except Exception as e:
    # Chain the cause so the real login failure's traceback survives
    # (the original `raise Exception(...)` discarded it).
    raise Exception(f"Failed to initialize Salesforce: {str(e)}") from e
263
 
264
# FastAPI setup — single application instance used by all routes below.
app = FastAPI()
266
 
267
# Security headers middleware: attach defensive HTTP headers to every response.
@app.middleware("http")
async def add_security_headers(request: Request, call_next):
    """Run the downstream handler, then stamp security headers on its response."""
    response = await call_next(request)
    response.headers["Content-Security-Policy"] = "default-src 'self'; frame-ancestors 'self';"
    response.headers["X-Frame-Options"] = "DENY"
    response.headers["X-Content-Type-Options"] = "nosniff"
    response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin"
    return response
278
 
279
# CORS configuration
# NOTE(review): wildcard origins combined with allow_credentials=True is
# rejected by browsers for credentialed requests — lock down origins in
# production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Consider specifying allowed origins in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
287
 
288
# Static files and templates
app.mount("/static", StaticFiles(directory="static"), name="static")
# NOTE(review): templates are loaded from the same "static" directory that is
# also mounted publicly — confirm this is intentional.
templates = Jinja2Templates(directory="static")
291
+
292
+ # LlamaIndex configuration
293
  Settings.llm = HuggingFaceLLM(
294
  model_name="meta-llama/Meta-Llama-3-8B-Instruct",
295
  tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
 
303
  model_name="BAAI/bge-small-en-v1.5"
304
  )
305
 
306
# Directory constants
PERSIST_DIR = "db"      # where the persisted vector index is stored
PDF_DIRECTORY = "data"  # source documents ingested at startup

# Initialize directories
os.makedirs(PDF_DIRECTORY, exist_ok=True)
os.makedirs(PERSIST_DIR, exist_ok=True)

# Chat history storage — plain module-level lists, so state is in-memory
# only (lost on restart) and shared across all users/requests.
chat_history = []          # full message records appended by /chat/
current_chat_history = []  # (query, response) pairs used as conversation context
317
+
318
def data_ingestion_from_directory():
    """Load every document in PDF_DIRECTORY, build a vector index over
    them, and persist the index to PERSIST_DIR.

    Raises:
        Exception: wrapping (and chaining) any failure during load,
        indexing, or persistence.
    """
    try:
        documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
        # (Removed an unused `StorageContext.from_defaults()` local that the
        # original created but never passed anywhere.)
        index = VectorStoreIndex.from_documents(documents)
        index.storage_context.persist(persist_dir=PERSIST_DIR)
    except Exception as e:
        # Chain the cause so the underlying traceback is preserved.
        raise Exception(f"Data ingestion failed: {str(e)}") from e
326
 
327
def initialize():
    """Run one-off startup work (document ingestion) and report its duration."""
    started = time.time()
    data_ingestion_from_directory()
    elapsed = time.time() - started
    print(f"Data ingestion completed in {elapsed:.2f} seconds")
331
+
332
def split_name(full_name: str) -> tuple:
    """Split a full name into a ``(first_name, last_name)`` pair.

    A single word is treated as the last name only; with three or more
    words everything after the first becomes the last name.

    Fix: an empty or whitespace-only input now returns ``("", "")`` —
    previously it fell through every branch and raised IndexError on
    ``words[0]``.
    """
    words = full_name.strip().split()
    if not words:
        return "", ""
    if len(words) == 1:
        return "", words[0]
    return words[0], " ".join(words[1:])
 
 
 
 
 
339
 
340
# Run initialization
initialize()  # ingest documents and build the index before the app serves requests
342
 
343
def handle_query(query: str) -> str:
    """Answer *query* against the persisted vector index.

    Appends the ``(query, response)`` pair to the module-level
    ``current_chat_history``. Never raises: any failure is returned as an
    error string.
    """
    chat_text_qa_msgs = [
        (
            "user",
            """
        You are the Clara Redfernstech chatbot. Provide accurate, professional answers in 10-15 words.
        {context_str}
        Question: {query_str}
        """
        )
    ]
    text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)

    try:
        storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
        index = load_index_from_storage(storage_context)

        # NOTE(review): this history context is built but never passed to the
        # query engine below, so it currently has no effect on the answer —
        # either wire it into the prompt or delete it.
        context_str = ""
        for past_query, response in reversed(current_chat_history[-5:]):  # Limit context to last 5 interactions
            if past_query.strip():
                context_str += f"User: '{past_query}'\nBot: '{response}'\n"

        query_engine = index.as_query_engine(text_qa_template=text_qa_template)
        answer = query_engine.query(query)

        # Fix: the original `getattr(answer, 'response', answer.get(...))`
        # evaluated the default eagerly, so any `answer` without a `.get`
        # method (e.g. a Response object) raised AttributeError before
        # getattr could return.
        if hasattr(answer, 'response'):
            response = answer.response
        elif isinstance(answer, dict):
            response = answer.get('response', "Sorry, I couldn't find an answer.")
        else:
            response = "Sorry, I couldn't find an answer."

        current_chat_history.append((query, response))
        return response
    except Exception as e:
        return f"Error processing query: {str(e)}"
373
+
374
@app.get("/favicon.ico")
async def favicon():
    # No favicon is shipped; answer 204 No Content so browsers stop retrying.
    # NOTE(review): HTMLResponse still sets a text/html content-type even with
    # an empty body — a plain Response(status_code=204) would be cleaner.
    return HTMLResponse(status_code=204)
377
 
 
 
 
 
 
 
 
 
 
 
 
 
378
@app.get("/ch/{id}", response_class=HTMLResponse)
async def load_chat(request: Request, id: str):
    """Serve the chat UI, injecting the path `id` as user_id into the template."""
    return templates.TemplateResponse("index.html", {"request": request, "user_id": id})
381
+
382
@app.post("/hist/")
async def save_chat_history(history: dict):
    """Summarize a chat transcript and store it on the Salesforce Lead.

    Expects a JSON body with ``userId`` (the Lead id) and ``history``
    (a list of ``{"sender", "message"}`` entries).

    Raises:
        HTTPException 400: userId missing, or history missing/malformed.
        HTTPException 500: the Salesforce update itself failed.
    """
    user_id = history.get('userId')
    if not user_id:
        raise HTTPException(status_code=400, detail="userId is required")

    entries = history.get('history')
    if not isinstance(entries, list):
        raise HTTPException(status_code=400, detail="history must be a list")

    # Fix: a malformed transcript previously surfaced as a 500
    # "Failed to update lead" — it is a client error, report it as 400.
    try:
        hist = ''.join(f"'{entry['sender']}: {entry['message']}'\n" for entry in entries)
    except (KeyError, TypeError) as e:
        raise HTTPException(status_code=400, detail=f"Malformed history entry: {str(e)}")

    summary = f"Conversation summary for user interest analysis:\n{hist}"

    try:
        sf.Lead.update(user_id, {'Description': summary})
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to update lead: {str(e)}")
    return {"summary": summary, "message": "Chat history saved"}
396
+
 
397
@app.post("/webhook")
async def receive_form_data(request: Request):
    """Create a Salesforce Lead from an inbound form-submission webhook.

    Returns the new Lead id as ``{"id": ...}``.

    Raises:
        HTTPException 400: request body is not valid JSON.
        HTTPException 500: the Lead could not be created.
    """
    # Fix: an unparseable body previously surfaced as a 500 — it is a
    # client error, report it as 400.
    try:
        form_data = await request.json()
    except Exception:
        raise HTTPException(status_code=400, detail="Request body must be valid JSON")

    try:
        first_name, last_name = split_name(form_data.get('name', ''))
        data = {
            'FirstName': first_name,
            'LastName': last_name,
            'Description': 'New lead from webhook',
            'Company': form_data.get('company', 'Unknown'),
            # Coerce to str first: a numeric phone value would otherwise
            # crash .strip() and become an opaque 500.
            'Phone': str(form_data.get('phone', '') or '').strip(),
            'Email': form_data.get('email', ''),
        }
        result = sf.Lead.create(data)
        return JSONResponse({"id": result['id']})
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to process webhook: {str(e)}")
 
 
 
416
 
417
@app.post("/chat/")
async def chat(request: MessageRequest):
    """Answer a user message via handle_query and record the exchange."""
    try:
        user_message = request.message
        bot_response = handle_query(user_message)
        record = {
            "sender": "User",
            "message": user_message,
            "response": bot_response,
            "timestamp": datetime.datetime.now().isoformat(),
        }
        chat_history.append(record)
        return {"response": bot_response}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Chat processing failed: {str(e)}")
431
+
432
@app.get("/")
def read_root():
    """Root landing endpoint; doubles as a trivial liveness check."""
    return {"message": "Welcome to the Redfernstech API"}