on1onmangoes committed on
Commit
4cceeb8
·
verified ·
1 Parent(s): 9a13b23

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +89 -23
app.py CHANGED
@@ -34,16 +34,17 @@ def stream_chat_with_rag(
34
 
35
  # Build the conversation prompt including system prompt and history
36
  conversation = system_prompt + "\n\n" + f"For Client: {client_name}\n"
37
- for user_input, assistant_response in history:
38
- conversation += f"User: {user_input}\nAssistant: {assistant_response}\n"
39
- conversation += f"User: {message}\nAssistant:"
 
40
 
41
  # Prepare the data to send to the API
42
- # Remove 'history' from the payload since the API does not accept it
43
  api_payload = {
44
- "message": conversation, # Include the history in the message
 
45
  "client_name": client_name,
46
- "system_prompt": "", # Optionally set to empty if included in message
47
  "num_retrieved_docs": num_retrieved_docs,
48
  "num_docs_final": num_docs_final,
49
  "temperature": temperature,
@@ -53,27 +54,92 @@ def stream_chat_with_rag(
53
  "penalty": penalty,
54
  }
55
 
56
- # Make the API call to get the assistant's reply
57
- response = client.predict(
58
- api_name="/chat",
59
- **api_payload
60
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
61
 
62
- # Extract the assistant's reply
63
- if isinstance(response, tuple):
64
- answer = response[0]
65
- else:
66
- answer = response
67
 
68
- # Debugging statements
69
- print("The Answer in stream_chat_with_rag:")
70
- print(answer)
71
 
72
- # Update the conversation history
73
- history.append((message, answer))
74
 
75
- # Return the updated history
76
- #return history
77
 
78
 
79
  # Function to handle PDF processing API call
 
34
 
35
  # Build the conversation prompt including system prompt and history
36
  conversation = system_prompt + "\n\n" + f"For Client: {client_name}\n"
37
+ if history: # Check if history exists
38
+ for user_input, assistant_response in history:
39
+ conversation += f"User: {user_input}\nAssistant: {assistant_response}\n"
40
+ conversation += f"User: {message}\nAssistant:" # Add the current message
41
 
42
  # Prepare the data to send to the API
 
43
  api_payload = {
44
+ "message": conversation, # Include the history in the message
45
+ "history": history,
46
  "client_name": client_name,
47
+ "system_prompt": "", # Optionally set to empty if included in the message
48
  "num_retrieved_docs": num_retrieved_docs,
49
  "num_docs_final": num_docs_final,
50
  "temperature": temperature,
 
54
  "penalty": penalty,
55
  }
56
 
57
+ try:
58
+ # Make the API call to get the assistant's reply
59
+ response = client.predict(
60
+ api_name="/chat",
61
+ **api_payload
62
+ )
63
+
64
+ # Extract the assistant's reply
65
+ if isinstance(response, tuple):
66
+ answer = response[0]
67
+ else:
68
+ answer = response
69
+
70
+ # Debugging statements
71
+ print("The Answer in stream_chat_with_rag:")
72
+ print(answer)
73
+
74
+ # Update the conversation history
75
+ history.append((message, answer))
76
+
77
+ # except Exception as e:
78
+ # print(f"An error occurred: {e}")
79
+ # answer = "There was an error retrieving the response."
80
+
81
+ # # Return the updated history
82
+ # return history
83
+
84
+
85
+ # def stream_chat_with_rag(
86
+ # message: str,
87
+ # history: list,
88
+ # client_name: str,
89
+ # system_prompt: str,
90
+ # num_retrieved_docs: int = 10,
91
+ # num_docs_final: int = 9,
92
+ # temperature: float = 0,
93
+ # max_new_tokens: int = 1024,
94
+ # top_p: float = 1.0,
95
+ # top_k: int = 20,
96
+ # penalty: float = 1.2,
97
+ # ):
98
+ # print(f"Message: {message}")
99
+ # print(f"History: {history}")
100
+
101
+ # # Build the conversation prompt including system prompt and history
102
+ # conversation = system_prompt + "\n\n" + f"For Client: {client_name}\n"
103
+ # for user_input, assistant_response in history:
104
+ # conversation += f"User: {user_input}\nAssistant: {assistant_response}\n"
105
+ # conversation += f"User: {message}\nAssistant:"
106
+
107
+ # # Prepare the data to send to the API
108
+ # # Remove 'history' from the payload since the API does not accept it
109
+ # api_payload = {
110
+ # "message": conversation, # Include the history in the message
111
+ # "client_name": client_name,
112
+ # "system_prompt": "", # Optionally set to empty if included in message
113
+ # "num_retrieved_docs": num_retrieved_docs,
114
+ # "num_docs_final": num_docs_final,
115
+ # "temperature": temperature,
116
+ # "max_new_tokens": max_new_tokens,
117
+ # "top_p": top_p,
118
+ # "top_k": top_k,
119
+ # "penalty": penalty,
120
+ # }
121
+
122
+ # # Make the API call to get the assistant's reply
123
+ # response = client.predict(
124
+ # api_name="/chat",
125
+ # **api_payload
126
+ # )
127
 
128
+ # # Extract the assistant's reply
129
+ # if isinstance(response, tuple):
130
+ # answer = response[0]
131
+ # else:
132
+ # answer = response
133
 
134
+ # # Debugging statements
135
+ # print("The Answer in stream_chat_with_rag:")
136
+ # print(answer)
137
 
138
+ # # Update the conversation history
139
+ # history.append((message, answer))
140
 
141
+ # # Return the updated history
142
+ # #return history
143
 
144
 
145
  # Function to handle PDF processing API call