Update app.py
app.py CHANGED
@@ -373,23 +373,6 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
             # Use GPT-4o with its vector store and template
             retriever = gpt_retriever
             prompt_template = QA_CHAIN_PROMPT_1 if choice == "Details" else QA_CHAIN_PROMPT_2
-        elif selected_model == phi_pipe:
-            # Use Phi-3.5 with its vector store and always use template2
-            retriever = phi_retriever
-            context = retriever.get_relevant_documents(message)
-            # Construct a simple, direct prompt for Phi-3.5
-            prompt = f"""
-            Based on the following information, provide a concise and well-formatted response without including questions or 'Helpful Answer' sections:
-
-            {context}
-
-            Information:
-            {message}
-            """
-
-        if selected_model == chat_model:
-            # Use GPT-4o with Langchain
-            prompt_template = QA_CHAIN_PROMPT_2 # Always using template2 for simplicity
             context = retriever.get_relevant_documents(message)
             prompt = prompt_template.format(context=context, question=message)
 
@@ -403,14 +386,32 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
             return response['result'], extract_addresses(response['result'])
 
         elif selected_model == phi_pipe:
-            # Use Phi-3.5
+            # Use Phi-3.5 with its vector store and a simplified prompt
+            retriever = phi_retriever
+            context = retriever.get_relevant_documents(message)
+            prompt = f"""
+            Here is the information based on the documents provided:
+            {context}
+
+            {message}
+            """
+
+            logging.debug(f"Phi-3.5 Prompt: {prompt}")
+
             response = selected_model(prompt, **{
                 "max_new_tokens": 300,
                 "return_full_text": False,
                 "temperature": 0.5,
                 "do_sample": False,
-            })
-
+            })
+
+            if response:
+                generated_text = response[0]['generated_text']
+                logging.debug(f"Phi-3.5 Response: {generated_text}")
+                return generated_text, extract_addresses(generated_text)
+            else:
+                logging.error("Phi-3.5 did not return any response.")
+                return "No response generated.", []
 
     elif retrieval_mode == "KGF":
         response = chain_neo4j.invoke({"question": message})
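Note: read as a whole, the commit consolidates the Phi-3.5 handling. In the old code, the phi branch built its prompt early and execution then appeared to fall through to the shared `prompt = prompt_template.format(...)` line, overwriting it; the new code builds the prompt, logs it, and checks for an empty response inside the branch that actually invokes the pipeline. Below is a minimal, self-contained sketch of the resulting code path, not the app's exact code: `retriever` and `pipe` stand in for app.py's `phi_retriever` and `phi_pipe`, and the `extract_addresses` default is a hypothetical placeholder.

    import logging

    def answer_with_phi(message, retriever, pipe, extract_addresses=lambda text: []):
        """Sketch of the new Phi-3.5 branch; names are stand-ins for app.py objects."""
        # Pull context from the Phi-3.5 vector store (phi_retriever in app.py).
        context = retriever.get_relevant_documents(message)

        # The simplified prompt introduced by this commit.
        prompt = f"""
        Here is the information based on the documents provided:
        {context}

        {message}
        """
        logging.debug(f"Phi-3.5 Prompt: {prompt}")

        # Generation settings from the diff: 300 new tokens, completion text only.
        response = pipe(prompt, max_new_tokens=300, return_full_text=False,
                        temperature=0.5, do_sample=False)

        # New guard from the diff: fall back gracefully on an empty response.
        if response:
            generated_text = response[0]['generated_text']
            logging.debug(f"Phi-3.5 Response: {generated_text}")
            return generated_text, extract_addresses(generated_text)
        logging.error("Phi-3.5 did not return any response.")
        return "No response generated.", []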
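Two caveats worth noting when reading the new branch. First, `get_relevant_documents` returns a list of LangChain `Document` objects, so interpolating `{context}` into the f-string embeds their Python repr (page content plus metadata) rather than the raw passage text; joining `page_content` would give the model a cleaner prompt. Second, with `"do_sample": False` the transformers pipeline decodes greedily, so the `"temperature": 0.5` setting has no effect. A one-line sketch of the join, assuming LangChain-style documents:

    # Flatten retrieved Documents to plain text before building the prompt.
    context_text = "\n\n".join(doc.page_content for doc in context)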