Pijush2023 committed on
Commit
512d879
·
verified ·
1 Parent(s): 79a2714

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -6
app.py CHANGED
@@ -105,6 +105,17 @@ phi_pipe = initialize_phi_model()
105
  gpt_model = initialize_gpt_model()
106
 
107
 
 
 
 
 
 
 
 
 
 
 
 
108
 
109
  # Pinecone setup
110
  from pinecone import Pinecone
@@ -357,11 +368,12 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
357
  response = fetch_google_flights()
358
  return response, extract_addresses(response)
359
 
360
- # Use a simple, direct prompt for Phi-3.5
361
  if selected_model == phi_pipe:
362
- prompt = f"Here is the information : , {message}"
 
363
  else:
364
- # Use the existing prompt templates for GPT-4o
365
  prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
366
  context = retriever.get_relevant_documents(message)
367
  prompt = prompt_template.format(context=context, question=message)
@@ -379,8 +391,10 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
379
  return response['result'], extract_addresses(response['result'])
380
 
381
  elif selected_model == phi_pipe:
382
- # Use Phi-3.5 directly with the formatted prompt
383
- response = selected_model(prompt, **{
 
 
384
  "max_new_tokens": 300,
385
  "return_full_text": False,
386
  "temperature": 0.5,
@@ -400,7 +414,6 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
400
 
401
 
402
 
403
-
404
  # def bot(history, choice, tts_choice, retrieval_mode):
405
  # if not history:
406
  # return history
 
105
  gpt_model = initialize_gpt_model()
106
 
107
 
108
+ # Existing embeddings and vector store for GPT-4o
109
+ gpt_embeddings = OpenAIEmbeddings(api_key=os.environ['OPENAI_API_KEY'])
110
+ gpt_vectorstore = PineconeVectorStore(index_name="radarfinaldata08192024", embedding=gpt_embeddings)
111
+ gpt_retriever = gpt_vectorstore.as_retriever(search_kwargs={'k': 5})
112
+
113
+ # New vector store setup for Phi-3.5
114
+ phi_embeddings = OpenAIEmbeddings(api_key=os.environ['OPENAI_API_KEY'])
115
+ phi_vectorstore = PineconeVectorStore(index_name="phivector08252024", embedding=phi_embeddings)
116
+ phi_retriever = phi_vectorstore.as_retriever(search_kwargs={'k': 5})
117
+
118
+
119
 
120
  # Pinecone setup
121
  from pinecone import Pinecone
 
368
  response = fetch_google_flights()
369
  return response, extract_addresses(response)
370
 
371
+ # Use different retrievers based on the selected model
372
  if selected_model == phi_pipe:
373
+ retriever = phi_retriever
374
+ prompt = f"Based on the provided documents, {message}"
375
  else:
376
+ retriever = gpt_retriever
377
  prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
378
  context = retriever.get_relevant_documents(message)
379
  prompt = prompt_template.format(context=context, question=message)
 
391
  return response['result'], extract_addresses(response['result'])
392
 
393
  elif selected_model == phi_pipe:
394
+ # Use Phi-3.5 directly with the formatted prompt and specific vector store
395
+ context = retriever.get_relevant_documents(message)
396
+ full_prompt = f"{context}\n\n{prompt}"
397
+ response = selected_model(full_prompt, **{
398
  "max_new_tokens": 300,
399
  "return_full_text": False,
400
  "temperature": 0.5,
 
414
 
415
 
416
 
 
417
  # def bot(history, choice, tts_choice, retrieval_mode):
418
  # if not history:
419
  # return history