Update app.py
app.py CHANGED
@@ -389,15 +389,15 @@ chain_neo4j = (
 # return "Sorry, I encountered an error while processing your request.", []
 
 
-# Short Prompt Template for Phi-3.5 Proprietary Model
+# # Short Prompt Template for Phi-3.5 Proprietary Model
 
-phi_short_template = f"""
-As an expert on Birmingham, Alabama, I will provide concise, accurate, and informative responses to your queries based on 128 token limit . Given the sunny weather today, {current_date}, feel free to ask me anything you need to know about the city.
-Provide only the direct answer to the question without any follow-up questions.
-{{context}}
-Question: {{question}}
-Answer:
-"""
+# phi_short_template = f"""
+# As an expert on Birmingham, Alabama, I will provide concise, accurate, and informative responses to your queries based on 128 token limit . Given the sunny weather today, {current_date}, feel free to ask me anything you need to know about the city.
+# Provide only the direct answer to the question without any follow-up questions.
+# {{context}}
+# Question: {{question}}
+# Answer:
+# """
 
 
 import re
@@ -449,11 +449,12 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
     elif selected_model == phi_pipe:
         retriever = phi_retriever
         context = retriever.get_relevant_documents(message)
-
-
-
+
+        # Format the Phi-3.5 prompt according to the required format
+        phi_prompt = f"<|system|>\nYou are a helpful assistant.<|end|>\n<|user|>\n{message}\n<|end|>\n<|assistant|>\n"
+        logging.debug(f"Phi-3.5 Prompt: {phi_prompt}")
 
-        response = selected_model(
+        response = selected_model(phi_prompt, **{
             "max_new_tokens": 128,  # Increased to handle longer responses
             "return_full_text": False,
             "temperature": 0.7,  # Adjusted to avoid cutting off
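Second, a self-contained sketch of how the new phi_prompt string and the dict-unpacked generation kwargs fit together, assuming phi_pipe is a standard transformers text-generation pipeline; the model id, the example message, and the added do_sample flag are assumptions, not taken from the commit:

from transformers import pipeline

# Illustrative stand-in for the Space's phi_pipe (model id assumed).
phi_pipe = pipeline("text-generation", model="microsoft/Phi-3.5-mini-instruct")

message = "What is the tallest building in Birmingham, Alabama?"

# Phi-3.5 chat markup as introduced by the commit: system and user turns
# are closed with <|end|>, and the assistant tag is left open so the
# model generates the reply.
phi_prompt = f"<|system|>\nYou are a helpful assistant.<|end|>\n<|user|>\n{message}\n<|end|>\n<|assistant|>\n"

# Generation kwargs passed via ** dict unpacking, mirroring the commit.
response = phi_pipe(phi_prompt, **{
    "max_new_tokens": 128,      # cap the length of the reply
    "return_full_text": False,  # return only the generated continuation
    "temperature": 0.7,         # sampling temperature
    "do_sample": True,          # assumed here so temperature takes effect
})
print(response[0]["generated_text"].strip())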