Pijush2023 committed on
Commit
eeeece1
·
verified ·
1 Parent(s): 512d879

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -4
app.py CHANGED
@@ -371,7 +371,21 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
371
  # Use different retrievers based on the selected model
372
  if selected_model == phi_pipe:
373
  retriever = phi_retriever
374
- prompt = f"Based on the provided documents, {message}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
375
  else:
376
  retriever = gpt_retriever
377
  prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
@@ -392,9 +406,7 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
392
 
393
  elif selected_model == phi_pipe:
394
  # Use Phi-3.5 directly with the formatted prompt and specific vector store
395
- context = retriever.get_relevant_documents(message)
396
- full_prompt = f"{context}\n\n{prompt}"
397
- response = selected_model(full_prompt, **{
398
  "max_new_tokens": 300,
399
  "return_full_text": False,
400
  "temperature": 0.5,
 
371
  # Use different retrievers based on the selected model
372
  if selected_model == phi_pipe:
373
  retriever = phi_retriever
374
+ # Retrieve context from vector store
375
+ context = retriever.get_relevant_documents(message)
376
+ # Construct a prompt with clear formatting instructions
377
+ prompt = f"""
378
+ Based on the provided documents, generate a well-formatted response with the following details:
379
+ {context}
380
+
381
+ Please format the output as follows:
382
+ Name: [Event Name]
383
+ Location: [Location]
384
+ Date and Time: [Date and Time]
385
+ Description: [Event Description]
386
+
387
+ Question: {message}
388
+ """
389
  else:
390
  retriever = gpt_retriever
391
  prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
 
406
 
407
  elif selected_model == phi_pipe:
408
  # Use Phi-3.5 directly with the formatted prompt and specific vector store
409
+ response = selected_model(prompt, **{
 
 
410
  "max_new_tokens": 300,
411
  "return_full_text": False,
412
  "temperature": 0.5,