on1onmangoes committed
Commit 7174ef9 · verified · 1 Parent(s): 6464518

Update app.py

Files changed (1): app.py +7 -7
app.py CHANGED
@@ -33,17 +33,17 @@ def stream_chat_with_rag(
     print(f"History: {history}")
 
     # Build the conversation prompt including system prompt and history
-    conversation = system_prompt + "\n\n" + "For Client: " + client_name + "\n"
+    conversation = system_prompt + "\n\n" + f"For Client: {client_name}\n"
     for user_input, assistant_response in history:
         conversation += f"User: {user_input}\nAssistant: {assistant_response}\n"
     conversation += f"User: {message}\nAssistant:"
 
     # Prepare the data to send to the API
+    # Remove 'history' from the payload since the API does not accept it
     api_payload = {
-        "message": message,
-        "history": history,
+        "message": conversation,  # Include the history in the message
         "client_name": client_name,
-        "system_prompt": system_prompt,
+        "system_prompt": "",  # Optionally set to empty if included in message
         "num_retrieved_docs": num_retrieved_docs,
         "num_docs_final": num_docs_final,
         "temperature": temperature,
@@ -59,12 +59,11 @@ def stream_chat_with_rag(
         **api_payload
     )
 
-    # Assuming the API returns the assistant's reply
-    # If the API returns a tuple (answer, relevant_docs), extract the answer
+    # Extract the assistant's reply
     if isinstance(response, tuple):
         answer = response[0]
     else:
-        answer = response  # If it's already the assistant's reply
+        answer = response
 
     # Debugging statements
     print("The Answer in stream_chat_with_rag:")
@@ -76,6 +75,7 @@ def stream_chat_with_rag(
     # Return the updated history
     return history
 
+
 # Function to handle PDF processing API call
 def process_pdf(pdf_file):
     return client.predict(
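
For context, a minimal sketch of how this part of stream_chat_with_rag reads once the commit is applied. Only the prompt building, the api_payload construction, and the tuple/str handling of the response come from the hunks above; the Space id, the exact function signature, the api_name note, and the final history.append are illustrative assumptions, not code from this repo.

# Sketch only: placeholders marked below are not from this repository.
from gradio_client import Client

client = Client("owner/space-name")  # placeholder Space id


def stream_chat_with_rag(message, history, client_name, system_prompt,
                         num_retrieved_docs, num_docs_final, temperature):
    print(f"History: {history}")

    # Fold the system prompt, client tag, and prior turns into one prompt string
    conversation = system_prompt + "\n\n" + f"For Client: {client_name}\n"
    for user_input, assistant_response in history:
        conversation += f"User: {user_input}\nAssistant: {assistant_response}\n"
    conversation += f"User: {message}\nAssistant:"

    # 'history' is no longer sent separately; it lives inside 'message'
    api_payload = {
        "message": conversation,
        "client_name": client_name,
        "system_prompt": "",  # already embedded in the message
        "num_retrieved_docs": num_retrieved_docs,
        "num_docs_final": num_docs_final,
        "temperature": temperature,
    }

    # Depending on the Space, client.predict may also need an api_name argument
    response = client.predict(**api_payload)

    # The backend may return (answer, relevant_docs) or just the answer string
    answer = response[0] if isinstance(response, tuple) else response

    # Debugging statement
    print("The Answer in stream_chat_with_rag:")

    # Assumed here: append the new turn before returning the updated history
    history.append((message, answer))
    return history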