hrguarinv committed · verified
Commit a59981a · 1 Parent(s): bd9a151

Rename routes/query_handler.py to routes/input_handler.py

routes/{query_handler.py → input_handler.py} RENAMED
@@ -1,8 +1,8 @@
 from fastapi import APIRouter, status, HTTPException
 from models.query import Query
 from routes import search_products, purchase, order_management, account_management, customer_support
-from services.nlp import recognize_intent, recognize_entities, extract_keywords, generate_response
-from services.utils import clean_text, encode_and_normalize, extract_order_id_from_query
+from services.nlp import recognize_intent, generate_response, recognize_speech
+from services.utils import extract_order_id_from_query, generate_image_embedding, generate_text_embedding
 
 
 router = APIRouter()
@@ -23,28 +23,47 @@ FUNCTION_DESCRIPTIONS_FOR_ORDERS = {
     "cancel_order": "Process order cancellation requests"
 }
 
+image_extensions = (".jpg", ".jpeg", ".png", ".bmp", ".gif", ".tiff")
+audio_extensions = (".mp3", ".wav", ".flac", ".ogg", ".aac", ".m4a")
 
-def query_processing(query: Query):
-    cleaned_text = clean_text(query.text)
-    query.intent = recognize_intent(cleaned_text)
-    query.entities = recognize_entities(cleaned_text)
-    query.keywords = extract_keywords(cleaned_text)
-    encoded_query = encode_and_normalize(cleaned_text)
 
-    if query.intent == "search for products":
-        return {"products": search_products.handle_search_products_by_keywords(encoded_query)}
+def query_processing(input: Input):
 
-    elif query.intent == "order management":
-        order_id = extract_order_id_from_query(query.text)
+    if input.text:
+        intent_from_text = recognize_intent(input.text)
+        encoded_text = generate_text_embedding(input.text)
+
+        print(f'Intent_from_text: {intent_from_text}')
+        print(f'Text embedding: {encoded_text.shape}')
+
+    if input.files:
+        for file in input.files:
+            if file.endswith(audio_extensions):
+                text_from_audio = recognize_speech(file)
+                print(f'Transcription: {text_from_audio}')
+                # history_openai_format.append({"role": "user", "content": message})
+            elif file.endswith(image_extensions):
+                image_vector = generate_image_embedding(file)
+                print(f'Shape of the image vector: {image_vector.shape}')
+                return {"Similar products to the image attached": search_products.handle_search_products_by_keywords(image_vector)}
+                # history_openai_format.append({"role": "user", "content": "User has sent an image."})
+            else:
+                return "Please attach a valid file (image or audio)"
+
+    if intent_from_text == "search for products":
+        return {"products related to the search": search_products.handle_search_products_by_keywords(encoded_text)}
+
+    elif intent_from_text == "order management":
+        order_id = extract_order_id_from_query(input.text)
         if order_id:
             return order_management.handle_track_order(order_id)
         else:
-            return "Please provide an Order Number"
+            return "Please explain to the user that an order number should be provided in the chat interface"
     else:
         return None
 
 
 @router.post("/")
-async def handle_response(query: Query):
-    context_from_elasticsearch = query_processing(query)
-    return {"generative response": generate_response(query, context_from_elasticsearch)}
+async def handle_input(input: Input):
+    context_from_elasticsearch = query_processing(input)
+    return {"generative response": generate_response(input, context_from_elasticsearch)}
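Note that the new handler type-hints `input: Input`, while the import block in this diff still only pulls in `Query` from `models.query`; the `Input` model itself is not part of this commit. Below is a minimal sketch of what such a model could look like, inferred solely from the attribute accesses in the handler (`input.text`, `input.files`); the module location, base class, and defaults are assumptions, not part of the committed code.

# Hypothetical Input model (not in this commit), matching the attribute
# accesses used by routes/input_handler.py: `input.text` and `input.files`.
from typing import List, Optional

from pydantic import BaseModel


class Input(BaseModel):
    text: Optional[str] = None          # free-text message from the chat interface
    files: Optional[List[str]] = None   # filenames of attached images or audio clips

With a model along these lines, the renamed route could be exercised with a plain JSON POST such as {"text": "where is my order?", "files": []}, assuming the router is mounted at the application root; file entries would need to carry an image or audio extension for the `endswith` checks in `query_processing` to route them.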