PeterPinetree committed
Commit b078faf · verified · 1 Parent(s): 1153581

Update app.py

Files changed (1)
  1. app.py +11 -19
app.py CHANGED
@@ -136,10 +136,10 @@ def respond(
     chat_history: List[Tuple[str, str]],
     genre: Optional[str] = None,
     use_full_memory: bool = True
-) -> List[Tuple[str, str]]:
+) -> List[Dict[str, str]]:  # Changed return type
     """Generate a response based on the current message and conversation history."""
     if not message.strip():
-        return chat_history
+        return [{"role": "assistant", "content": "Please enter a message"}]
 
     # Format messages for API
     formatted_messages = [{"role": "system", "content": get_enhanced_system_prompt(genre)}]
@@ -148,12 +148,12 @@ def respond(
     if chat_history and use_full_memory:
         for user_msg, bot_msg in chat_history[-MEMORY_WINDOW:]:
             formatted_messages.extend([
-                {"role": "user", "content": user_msg},
-                {"role": "assistant", "content": bot_msg}
+                {"role": "user", "content": str(user_msg)},
+                {"role": "assistant", "content": str(bot_msg)}
             ])
 
     # Add current message
-    formatted_messages.append({"role": "user", "content": message})
+    formatted_messages.append({"role": "user", "content": str(message)})
 
     try:
         # Make API call
@@ -161,23 +161,15 @@ def respond(
             messages=formatted_messages,
             max_tokens=MAX_TOKENS,
             temperature=TEMPERATURE,
-            top_p=TOP_P,
-            stream=False  # Ensure non-streaming response
+            top_p=TOP_P
         )
 
-        # Extract response safely using proper attribute access
-        if hasattr(response.choices[0], 'message'):
-            bot_message = response.choices[0].message.content
-        else:
-            bot_message = str(response.choices[0].delta.content)
-
-        new_history = list(chat_history)
-        new_history.append((message, bot_message))
-        return new_history
+        # Extract response
+        bot_message = response.choices[0].message.content
+        return [{"role": "assistant", "content": bot_message}]
 
     except Exception as e:
-        error_message = f"Story magic temporarily interrupted. Please try again. (Error: {str(e)})"
-        return list(chat_history) + [(message, error_message)]
+        return [{"role": "assistant", "content": f"Story magic temporarily interrupted. Please try again. (Error: {str(e)})"}]
 
 def save_story(chat_history):
     """Convert chat history to markdown for download"""
@@ -270,7 +262,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
     # 4) Connect each starter button:
     for starter_button in starter_buttons:
         starter_button.click(
-            fn=lambda x: [(str(x), "")],  # Return initial history tuple
+            fn=lambda x: [{"role": "user", "content": str(x)}],  # Format as message dict
             inputs=[starter_button],
             outputs=[chatbot],
             queue=False
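
Across both the respond() body and the starter-button callback, the commit swaps the old (user_msg, bot_msg) tuple format for openai-style role/content dicts. A minimal sketch of that conversion, assuming a stand-in MEMORY_WINDOW value and system prompt (the real constant and get_enhanced_system_prompt live elsewhere in app.py):

from typing import Dict, List, Tuple

MEMORY_WINDOW = 5  # stand-in; app.py defines the real window size

def to_messages(chat_history: List[Tuple[str, str]], system_prompt: str) -> List[Dict[str, str]]:
    """Flatten (user, bot) tuples into the role/content dicts the chat API expects."""
    messages = [{"role": "system", "content": system_prompt}]
    for user_msg, bot_msg in chat_history[-MEMORY_WINDOW:]:
        messages.append({"role": "user", "content": str(user_msg)})
        messages.append({"role": "assistant", "content": str(bot_msg)})
    return messages

# Example: one prior exchange plus a system prompt yields three message dicts.
print(to_messages([("Tell me a story", "Once upon a time...")], "You are a storyteller."))

The same dict shape is what Gradio's Chatbot component consumes when configured with type="messages", which is presumably why the starter buttons now seed the chat with a single user-role dict instead of a (text, "") tuple.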