lamhieu committed on
Commit
a5b9863
·
1 Parent(s): a17a0ff

chore: update something

Browse files
Files changed (1) hide show
  1. app.py +8 -8
app.py CHANGED
@@ -468,9 +468,9 @@ def generate_chat(
468
 
469
  # Add the system prompt to the conversation
470
  if system_prompt:
471
- if system_prompt.strip() == DEFAULT_SYSTEM_PROMPT:
472
  system_prompt = (
473
- system_prompt.strip() + "\n\nAdditional context: " + client_info
474
  )
475
  conversation.append({"role": "system", "content": system_prompt})
476
 
@@ -519,7 +519,7 @@ def generate_chat(
519
  }
520
  )
521
 
522
- logger.debug(f"UUID: {uuid} - Conversation: {conversation}")
523
 
524
  # Apply the chat template to convert the conversation into input_ids
525
  input_ids = chat_tokenizer.apply_chat_template(
@@ -623,7 +623,7 @@ def generate_chat(
623
  t = Thread(target=chat_model.generate, kwargs=generate_kwargs)
624
  t.start()
625
 
626
- logger.debug(
627
  f"UUID: {uuid} - Is apply tools: {apply_tools} - Is apply documents: {len(document_references) > 0} - Is previous response: {previous_response is not None} - Start generating chat responses"
628
  )
629
 
@@ -682,9 +682,9 @@ def generate(
682
  message["attachments"] = handle_file_extraction(
683
  files=list(message["files"]), uuid=uuid
684
  )
685
- logger.debug(f"UUID: {uuid} - Image text extraction process completed")
686
 
687
- logger.debug(f"UUID: {uuid} - Previous chat history: {chat_history}")
688
  for idx, chat_pair in enumerate(chat_history):
689
  user_message, assistant_message = chat_pair
690
  if not isinstance(user_message, str) and assistant_message is None:
@@ -696,14 +696,14 @@ def generate(
696
  )
697
  chat_history[idx + 1][0] = chat_input
698
  chat_history[idx] = [None, None]
699
- logger.debug(
700
  f"UUID: {uuid} - Updated chat history: {chat_history} - Updated chat input: {chat_input}"
701
  )
702
 
703
  chat_history = list(
704
  filter(lambda x: x[0] is not None and x[1] is not None, chat_history)
705
  )
706
- logger.debug(f"UUID: {uuid} - Filtered chat history: {chat_history}")
707
 
708
  yield from generate_chat(
709
  uuid=uuid,
 
468
 
469
  # Add the system prompt to the conversation
470
  if system_prompt:
471
+ if system_prompt.strip() == DEFAULT_SYSTEM_PROMPT.strip():
472
  system_prompt = (
473
+ system_prompt.strip() + "\nAdditional context: " + client_info
474
  )
475
  conversation.append({"role": "system", "content": system_prompt})
476
 
 
519
  }
520
  )
521
 
522
+ logger.info(f"UUID: {uuid} - Conversation: {conversation}")
523
 
524
  # Apply the chat template to convert the conversation into input_ids
525
  input_ids = chat_tokenizer.apply_chat_template(
 
623
  t = Thread(target=chat_model.generate, kwargs=generate_kwargs)
624
  t.start()
625
 
626
+ logger.info(
627
  f"UUID: {uuid} - Is apply tools: {apply_tools} - Is apply documents: {len(document_references) > 0} - Is previous response: {previous_response is not None} - Start generating chat responses"
628
  )
629
 
 
682
  message["attachments"] = handle_file_extraction(
683
  files=list(message["files"]), uuid=uuid
684
  )
685
+ logger.info(f"UUID: {uuid} - Image text extraction process completed")
686
 
687
+ logger.info(f"UUID: {uuid} - Previous chat history: {chat_history}")
688
  for idx, chat_pair in enumerate(chat_history):
689
  user_message, assistant_message = chat_pair
690
  if not isinstance(user_message, str) and assistant_message is None:
 
696
  )
697
  chat_history[idx + 1][0] = chat_input
698
  chat_history[idx] = [None, None]
699
+ logger.info(
700
  f"UUID: {uuid} - Updated chat history: {chat_history} - Updated chat input: {chat_input}"
701
  )
702
 
703
  chat_history = list(
704
  filter(lambda x: x[0] is not None and x[1] is not None, chat_history)
705
  )
706
+ logger.info(f"UUID: {uuid} - Filtered chat history: {chat_history}")
707
 
708
  yield from generate_chat(
709
  uuid=uuid,