Update app.py
app.py (CHANGED)
@@ -138,7 +138,7 @@ def format_history_for_gradio(history_tuples):
 def respond(
     message: str,
     chat_history: List[Tuple[str, str]],
-    genre: Optional[str] = None,
+    genre: Optional[str] = None,
     use_full_memory: bool = True
 ) -> Tuple[str, List[Tuple[str, str]]]:
     """Generate a response based on the current message and conversation history."""
@@ -146,21 +146,19 @@ def respond(
         return "", chat_history

     try:
-        #
+        # Start with the system prompt
         api_messages = [{"role": "system", "content": get_enhanced_system_prompt(genre)}]

-        # Add chat history
+        # Add formatted chat history
         if chat_history and use_full_memory:
             for user_msg, bot_msg in chat_history[-MEMORY_WINDOW:]:
-                api_messages.extend([
-                    {"role": "user", "content": str(user_msg)},
-                    {"role": "assistant", "content": str(bot_msg)}
-                ])
+                api_messages.append({"role": "user", "content": str(user_msg)})
+                api_messages.append({"role": "assistant", "content": str(bot_msg)})

-        # Add
+        # Add the user's latest message
         api_messages.append({"role": "user", "content": str(message)})

-        # Make API call
+        # Make the API call
         response = client.chat_completion(
             messages=api_messages,
             max_tokens=MAX_TOKENS,
@@ -168,18 +166,17 @@ def respond(
             top_p=TOP_P
         )

-        #
+        # Extract response content
         bot_message = response.choices[0].message.content

-        # Update chat history
+        # Update chat history
         updated_history = chat_history + [(message, bot_message)]
-
-        # Return updated components
         return "", updated_history
-
+
     except Exception as e:
+        # Enhanced error handling
         error_msg = f"Story magic temporarily interrupted. Please try again. (Error: {str(e)})"
-        logging.error("Error in respond function", exc_info=True)
+        logging.error("Error in respond function", exc_info=True)
         return "", chat_history + [(message, error_msg)]

 def save_story(chat_history):
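For quick reference, here is a minimal sketch of how the updated respond() signature could be exercised on its own. The sample prompt and genre value are invented for illustration, and the call assumes the rest of app.py (client, get_enhanced_system_prompt, MEMORY_WINDOW, MAX_TOKENS, TOP_P) is already defined in scope.

# Hypothetical standalone call; the prompt text and "mystery" genre are
# placeholders, not values taken from this commit.
chat_history = []

# respond() returns ("", updated_history): an empty string, presumably used
# to clear the input textbox in the Gradio UI, plus the history extended
# with the new (user, bot) tuple.
_, chat_history = respond(
    message="Begin a short story about a lighthouse keeper.",
    chat_history=chat_history,
    genre="mystery",
    use_full_memory=True,  # keep up to MEMORY_WINDOW past turns in the prompt
)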