PeterPinetree committed (verified)
Commit b377514 · 1 Parent(s): 5616d3e

Update app.py

Files changed (1)
  1. app.py +62 -72
app.py CHANGED
@@ -73,7 +73,6 @@ def get_examples_for_genre(genre):
 def get_enhanced_system_prompt(genre=None):
     """Generate a detailed system prompt with optional genre specification"""
     selected_genre = genre or "fantasy"
-
     system_message = f"""You are an interactive storyteller creating an immersive {selected_genre} choose-your-own-adventure story.
 For each response:
 1. Provide vivid sensory descriptions of the scene, environment, and characters
@@ -87,90 +86,70 @@ Format your three options as:
 - Option 2: [Complete sentence describing a possible action]
 - Option 3: [Complete sentence describing a possible action]
 Keep responses engaging but concise (200-300 words maximum). If the user's input doesn't clearly indicate a choice, interpret their intent and move the story forward in the most logical direction."""
-
     return system_message

 def create_story_summary(chat_history):
     """Create a concise summary of the story so far if the history gets too long"""
-    if len(chat_history) <= 2: # Not enough history to summarize
+    if len(chat_history) <= 2:
         return None

-    # Extract just the content for summarization
     story_text = ""
     for user_msg, bot_msg in chat_history:
         story_text += f"User: {user_msg}\nStory: {bot_msg}\n\n"

-    # Add a summary instruction
     summary_instruction = {
         "role": "system",
         "content": "The conversation history is getting long. Please create a brief summary of the key plot points and character development so far to help maintain context without exceeding token limits."
     }
-
     return summary_instruction

 def respond(message, chat_history, genre=None, use_full_memory=True):
     """Generate a response based on the current message and conversation history"""
-    # Use a more detailed system prompt
     system_message = get_enhanced_system_prompt(genre)
-
-    # Format history correctly for the API
+
     formatted_history = []
     for user_msg, bot_msg in chat_history:
         formatted_history.append({"role": "user", "content": user_msg})
         formatted_history.append({"role": "assistant", "content": bot_msg})

-    # Create proper API messages
     api_messages = [{"role": "system", "content": system_message}]

-    # Token management strategy
-    # Option 1: Use full history but potentially hit token limits
     if use_full_memory and formatted_history:
-        # If there's a lot of history, we might need a summary
-        if len(formatted_history) > 20: # Arbitrary threshold, adjust as needed
+        if len(formatted_history) > 20: # Arbitrary threshold
             summary_instruction = create_story_summary(chat_history[:len(chat_history)-5])
             if summary_instruction:
                 api_messages.append(summary_instruction)
-
-            # Add only the most recent exchanges after the summary
             for msg in formatted_history[-10:]:
                 api_messages.append(msg)
         else:
-            # Add all history if it's not too long
             for msg in formatted_history:
                 api_messages.append(msg)
-    # Option 2: Limited history - fallback if full memory is disabled
     else:
-        # Set a larger memory length but still have a limit
-        memory_length = 10 # Increased from 5
+        memory_length = 10
         if formatted_history:
             for msg in formatted_history[-memory_length*2:]:
                 api_messages.append(msg)

-    # Add current message
     api_messages.append({"role": "user", "content": message})

-    # Special handling for story initialization
     if not chat_history or message.lower() in ["start", "begin", "begin my adventure"]:
-        # Add a specific instruction for starting a new story
         api_messages.append({
             "role": "system",
-            "content": f"Begin a new {genre or 'fantasy'} adventure with an intriguing opening scene. Introduce the protagonist without assuming too much about them, allowing the user to shape the character."
+            "content": f"Begin a new {genre or 'fantasy'} adventure with an intriguing opening scene. Introduce the protagonist without assuming too much about them."
         })

-    # Generate and stream response
     bot_message = ""
     try:
-        for response in client.chat_completion(
+        for response_chunk in client.chat_completion(
             api_messages,
             max_tokens=512,
             stream=True,
             temperature=0.7,
             top_p=0.95,
         ):
-            delta = response.choices[0].delta.content
+            delta = response_chunk.choices[0].delta.content
             if delta:
                 bot_message += delta
-                # Create a new list for the updated chat history
                 new_history = chat_history.copy()
                 new_history.append((message, bot_message))
                 yield new_history
@@ -197,11 +176,13 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:

     with gr.Row():
         with gr.Column(scale=3):
+            # Chat window + user input
             chatbot = gr.Chatbot(
                 height=500,
                 bubble_full_width=False,
                 show_copy_button=True,
                 avatar_images=(None, "🧙"),
+                type="messages" # Use OpenAI-style messages
             )
             msg = gr.Textbox(
                 placeholder="Describe what you want to do next in the story...",
@@ -222,62 +203,64 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
                 value="fantasy"
             )

-            # Add a memory toggle option
             full_memory = gr.Checkbox(
                 label="Full Story Memory",
                 value=True,
-                info="When enabled, the AI will try to remember the entire story"
+                info="When enabled, the AI tries to remember the entire story. If disabled, only the last few exchanges are used."
             )

-            # Create story starter buttons that automatically submit
             gr.Markdown("## Story Starters")
-            story_starters = []

-            def create_example_click_handler(example_text):
-                def example_click():
-                    return example_text
-                return example_click
+            # -- Create four placeholder buttons for story starters --
+            starter_btn1 = gr.Button("Starter 1")
+            starter_btn2 = gr.Button("Starter 2")
+            starter_btn3 = gr.Button("Starter 3")
+            starter_btn4 = gr.Button("Starter 4")
+            starter_buttons = [starter_btn1, starter_btn2, starter_btn3, starter_btn4]

-            # Create a start button
-            start_btn = gr.Button("Begin my adventure", variant="secondary")
-            start_btn.click(
-                fn=create_example_click_handler("Begin my adventure"),
-                outputs=[msg],
-                queue=False
-            ).then(
-                fn=respond,
-                inputs=[msg, chatbot, genre, full_memory],
-                outputs=[chatbot]
-            )
+            # Function to update the labels of the 4 starter buttons
+            def update_starter_buttons(selected_genre):
+                # Grab up to 4 examples from the chosen genre
+                examples = get_examples_for_genre(selected_genre)
+                # If there are fewer than 4, fill the rest with placeholders or hide them
+                button_updates = []
+                for i in range(4):
+                    if i < len(examples):
+                        button_updates.append(gr.Button.update(value=examples[i], visible=True))
+                    else:
+                        button_updates.append(gr.Button.update(value="(no starter)", visible=False))
+                return button_updates
+
+            # Function that populates the msg with the chosen starter text, then calls respond
+            def pick_starter(starter_text, chat_history, selected_genre, memory_flag):
+                # The function returns the text to put in msg
+                # but we also chain the respond function via .then
+                return starter_text

-            # Create example buttons for the current genre
-            example_buttons = []
-            for i, example in enumerate(get_examples_for_genre("fantasy")):
-                btn = gr.Button(example, elem_id=f"example_{i}")
-                btn.click(
-                    fn=create_example_click_handler(example),
+            # Hook each starter button:
+            # 1) Put the chosen text into 'msg'
+            # 2) Then call 'respond' to update the chatbot
+            for starter_button in starter_buttons:
+                starter_button.click(
+                    fn=pick_starter,
+                    inputs=[starter_button, chatbot, genre, full_memory],
                     outputs=[msg],
                     queue=False
                 ).then(
                     fn=respond,
                     inputs=[msg, chatbot, genre, full_memory],
-                    outputs=[chatbot]
+                    outputs=[chatbot],
+                    queue=False
                 )
-                example_buttons.append(btn)
-
-            # Function to update example buttons when genre changes
-            def update_example_buttons(genre):
-                examples = get_examples_for_genre(genre)
-                return [gr.Button.update(value=example) for example in examples[:4]] # Limit to 4 examples
-
-            # Connect genre dropdown to update example buttons
-            genre.change(
-                fn=update_example_buttons,
-                inputs=[genre],
-                outputs=example_buttons
-            )
+
+            # Connect the genre dropdown to update these 4 starter buttons
+            genre.change(
+                fn=update_starter_buttons,
+                inputs=[genre],
+                outputs=starter_buttons
+            )

-    # Set up event handlers for the chatbot
+    # -- Chat submission + button events --
     msg.submit(respond, [msg, chatbot, genre, full_memory], [chatbot])
     submit.click(respond, [msg, chatbot, genre, full_memory], [chatbot])

@@ -285,13 +268,20 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
     clear.click(lambda: [], None, chatbot, queue=False)
     clear.click(lambda: "", None, msg, queue=False)

-    # Add a download story button
+    # -- "Download My Story" row --
     with gr.Row():
-        save_btn = gr.Button("Save Story as Markdown", variant="secondary")
+        save_btn = gr.Button("Download My Story", variant="secondary")
         story_output = gr.Markdown(visible=False)

+    # "Download My Story" logic
     save_btn.click(save_story, inputs=[chatbot], outputs=[story_output])
-    save_btn.click(lambda: True, None, story_output, js="() => {document.getElementById('story_output').scrollIntoView();}", queue=False)
+    save_btn.click(
+        fn=lambda: True,
+        inputs=None,
+        outputs=story_output,
+        js="() => {document.getElementById('story_output').scrollIntoView();}",
+        queue=False
+    )

 if __name__ == "__main__":
-    demo.launch(server_name="0.0.0.0", server_port=7860)
+    demo.launch(server_name="0.0.0.0", server_port=7860)
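
For context, the new starter buttons rely on Gradio's event chaining: the click first writes the button's own label into the message Textbox (the Button itself is passed as an input, so its label reaches the handler, as in the commit's pick_starter), and a chained .then() call feeds that Textbox plus the chat state into respond. Below is a minimal self-contained sketch of the same pattern; the component names and the stub responder are illustrative placeholders, not code from app.py.

import gradio as gr

# Sketch of the click -> .then() chaining pattern used for the story starters.
# pick_starter mirrors the commit's helper: it returns the button's label,
# which Gradio writes into the Textbox before the chained responder runs.
def pick_starter(starter_text):
    return starter_text

def respond_stub(message, history):
    # Stand-in for the real respond() generator: append one exchange
    # to the (tuple-style) chat history and return it.
    return history + [(message, f"(the story continues after: {message})")]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Describe what you want to do next...")
    starter = gr.Button("Begin my adventure")

    # 1) put the button label into the textbox, 2) then update the chatbot
    starter.click(
        fn=pick_starter, inputs=[starter], outputs=[msg], queue=False
    ).then(
        fn=respond_stub, inputs=[msg, chatbot], outputs=[chatbot]
    )

if __name__ == "__main__":
    demo.launch()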