Update app.py
app.py CHANGED
@@ -540,6 +540,22 @@ chain_neo4j = (
 
 
 
+import re
+
+def clean_response(response_text):
+    # Remove any metadata-like information and focus on the main content
+    # Removes "Document(metadata=...)" and other similar patterns
+    cleaned_response = re.sub(r'Document\(metadata=.*?\),?\s*', '', response_text, flags=re.DOTALL)
+    cleaned_response = re.sub(r'page_content=".*?"\),?', '', cleaned_response, flags=re.DOTALL)
+    cleaned_response = re.sub(r'\[.*?\]', '', cleaned_response, flags=re.DOTALL)  # Remove content in brackets
+    cleaned_response = re.sub(r'\s+', ' ', cleaned_response).strip()
+    # Remove any unwanted follow-up questions or unnecessary text
+    cleaned_response = re.sub(r'Question:.*\nAnswer:', '', cleaned_response, flags=re.DOTALL).strip()
+    return cleaned_response
+
+
+
+
 # Define the custom template for Phi-3.5
 phi_custom_template = """
 <|system|>
@@ -549,6 +565,7 @@ You are a helpful assistant.<|end|>
 {question}<|end|>
 <|assistant|>
 """
+import traceback
 
 def generate_answer(message, choice, retrieval_mode, selected_model):
     logging.debug(f"generate_answer called with choice: {choice}, retrieval_mode: {retrieval_mode}, and selected_model: {selected_model}")