Spaces:
Paused
Paused
Update app.py
Browse files
app.py
CHANGED
@@ -392,7 +392,7 @@ chain_neo4j = (
|
|
392 |
# Short Prompt Template for Phi-3.5 Proprietary Model
|
393 |
|
394 |
phi_short_template = f"""
|
395 |
-
As an expert on Birmingham, Alabama, I will provide concise, accurate, and informative responses to your queries based on
|
396 |
Provide only the direct answer to the question without any follow-up questions.
|
397 |
{{context}}
|
398 |
Question: {{question}}
|
@@ -454,7 +454,7 @@ def generate_answer(message, choice, retrieval_mode, selected_model):
|
|
454 |
logging.debug(f"Phi-3.5 Prompt: {prompt}")
|
455 |
|
456 |
response = selected_model(prompt, **{
|
457 |
-
"max_new_tokens":
|
458 |
"return_full_text": False,
|
459 |
"temperature": 0.7, # Adjusted to avoid cutting off
|
460 |
"do_sample": True, # Allow sampling to increase response diversity
|
|
|
392 |
# Short Prompt Template for Phi-3.5 Proprietary Model
|
393 |
|
394 |
phi_short_template = f"""
|
395 |
+
As an expert on Birmingham, Alabama, I will provide concise, accurate, and informative responses to your queries based on a 128-token limit. Given the sunny weather today, {current_date}, feel free to ask me anything you need to know about the city.
|
396 |
Provide only the direct answer to the question without any follow-up questions.
|
397 |
{{context}}
|
398 |
Question: {{question}}
|
|
|
454 |
logging.debug(f"Phi-3.5 Prompt: {prompt}")
|
455 |
|
456 |
response = selected_model(prompt, **{
|
457 |
+
"max_new_tokens": 128, # Increased to handle longer responses
|
458 |
"return_full_text": False,
|
459 |
"temperature": 0.7, # Adjusted to avoid cutting off
|
460 |
"do_sample": True, # Allow sampling to increase response diversity
|