chore: update something
app.py
CHANGED
@@ -32,7 +32,7 @@ from transformers.dynamic_module_utils import get_imports
 from bs4 import BeautifulSoup
 from functools import lru_cache
 
-logging.basicConfig(level=logging.
+logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
 
@@ -459,9 +459,9 @@ def generate_chat(
 
     # Add the system prompt to the conversation
     if system_prompt:
-        if system_prompt.strip() == DEFAULT_SYSTEM_PROMPT:
+        if system_prompt.strip() == DEFAULT_SYSTEM_PROMPT.strip():
             system_prompt = (
-                system_prompt.strip() + "\
+                system_prompt.strip() + "\nAdditional context: " + client_info
             )
         conversation.append({"role": "system", "content": system_prompt})
 
@@ -510,7 +510,7 @@ def generate_chat(
         }
     )
 
-    logger.
+    logger.info(f"UUID: {uuid} - Conversation: {conversation}")
 
     # Apply the chat template to convert the conversation into input_ids
     input_ids = chat_tokenizer.apply_chat_template(
@@ -614,7 +614,7 @@ def generate_chat(
     t = Thread(target=chat_model.generate, kwargs=generate_kwargs)
     t.start()
 
-    logger.
+    logger.info(
         f"UUID: {uuid} - Is apply tools: {apply_tools} - Is apply documents: {len(document_references) > 0} - Is previous response: {previous_response is not None} - Start generating chat responses"
     )
 
@@ -673,9 +673,9 @@ def generate(
         message["attachments"] = handle_file_extraction(
             files=list(message["files"]), uuid=uuid
         )
-    logger.
+    logger.info(f"UUID: {uuid} - Image text extraction process completed")
 
-    logger.
+    logger.info(f"UUID: {uuid} - Previous chat history: {chat_history}")
     for idx, chat_pair in enumerate(chat_history):
         user_message, assistant_message = chat_pair
         if not isinstance(user_message, str) and assistant_message is None:
@@ -687,14 +687,14 @@ def generate(
             )
             chat_history[idx + 1][0] = chat_input
             chat_history[idx] = [None, None]
-            logger.
+            logger.info(
                 f"UUID: {uuid} - Updated chat history: {chat_history} - Updated chat input: {chat_input}"
             )
 
     chat_history = list(
         filter(lambda x: x[0] is not None and x[1] is not None, chat_history)
     )
-    logger.
+    logger.info(f"UUID: {uuid} - Filtered chat history: {chat_history}")
 
     yield from generate_chat(
         uuid=uuid,