PeterPinetree committed
Commit 99edc13 · verified · 1 Parent(s): 31ed2ea

Update app.py

Files changed (1)
  1. app.py +331 -72
app.py CHANGED
@@ -1,97 +1,356 @@
  import gradio as gr
  import os
- from huggingface_hub import InferenceClient
  import random
- from typing import List, Tuple

  # Get token from environment variable
  hf_token = os.environ.get("HF_TOKEN")
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hf_token)

- # Story genres and starter prompts
  GENRE_EXAMPLES = {
-     "Fantasy": [
-         "You enter the ancient forest seeking the wizard's tower.",
-         "Approaching the dragon cautiously, you raise your shield."
      ],
-     "Sci-Fi": [
-         "Hacking into the space station's mainframe, you uncover a secret.",
-         "Investigating the strange signal from the abandoned planet, you find more than you expected."
      ],
-     "Mystery": [
-         "Examining the crime scene, you notice an overlooked clue.",
-         "Following the suspicious figure through foggy streets leads you to a discovery."
      ],
-     "Horror": [
-         "You slowly open the creaking door to the basement.",
-         "Reading the forbidden text, the candles flicker ominously."
      ]
  }

- # System prompt for generating stories
- def get_story_prompt(genre: str) -> str:
-     return f"""You are an AI storyteller crafting an interactive {genre} adventure.
- Write engaging scenes with vivid details and always end each response with three numbered choices.
- Follow the format:
-
- 1. [Complete action-based choice]
- 2. [Complete action-based choice]
- 3. [Complete action-based choice]"""
-
- # Generate initial story
- def generate_story_intro(genre: str) -> str:
-     prompt = get_story_prompt(genre)
-     example = random.choice(GENRE_EXAMPLES[genre])
-     response = client.text_generation(prompt=f"{prompt}\n\nUser: {example}\nAssistant:", max_new_tokens=300)
-     return response[0]['generated_text'].strip()
-
- # Continue the story
- def continue_story(user_choice: str, history: List[Tuple[str, str]], genre: str) -> str:
-     prompt = get_story_prompt(genre)
-     conversation = "\n".join([f"User: {turn[0]}\nAssistant: {turn[1]}" for turn in history[-5:]])
-     full_prompt = f"{prompt}\n\n{conversation}\nUser: {user_choice}\nAssistant:"
-     response = client.text_generation(prompt=full_prompt, max_new_tokens=300)
-     return response[0]['generated_text'].strip()
-
- # Reset the conversation
- def reset_story() -> Tuple[List[Tuple[str, str]], str, str]:
-     return [], "", ""
-
- # Gradio UI Setup
- with gr.Blocks() as demo:
-     gr.Markdown(
-         """
-         # 🌟 AI Story Studio
-         **Create Your Own Adventure with AI!**
-         Choose a genre, start your journey, and guide the story with your choices.

-         ## 🕹️ How to Play:
-         1. **Pick a genre** from the dropdown.
-         2. **Start with a story prompt** or enter your own beginning.
-         3. **Choose from the AI's options** or type your own response to shape the adventure!

-         *Tip: Your choices affect the story's outcome!*
-         """
-     )

-     with gr.Row():
-         genre_dropdown = gr.Dropdown(
-             choices=list(GENRE_EXAMPLES.keys()), label="Select Genre", value="Fantasy"
          )
-         start_story_btn = gr.Button("Start New Story")

-     chat_history = gr.Chatbot(height=400)
-     user_input = gr.Textbox(placeholder="Type your next move...")

      with gr.Row():
-         submit_btn = gr.Button("Continue Story", variant="primary")
-         clear_btn = gr.Button("Reset")

-     # Function connections
-     start_story_btn.click(fn=generate_story_intro, inputs=[genre_dropdown], outputs=chat_history)
-     submit_btn.click(fn=continue_story, inputs=[user_input, chat_history, genre_dropdown], outputs=chat_history)
-     clear_btn.click(fn=reset_story, inputs=[], outputs=[chat_history, user_input])

- # Launch the app
  if __name__ == "__main__":
-     demo.launch()

  import gradio as gr
  import os
+ from huggingface_hub import InferenceClient, __version__ as hf_version
  import random
+ from typing import Generator, Dict, List, Tuple, Optional
+ import logging  # Added logging for better debugging
+
+ # Configure logging with DEBUG level and add version info
+ logging.basicConfig(
+     level=logging.DEBUG,
+     format='%(asctime)s - %(levelname)s - %(message)s'
+ )
+ logging.debug(f"Using huggingface_hub version: {hf_version}")

  # Get token from environment variable
  hf_token = os.environ.get("HF_TOKEN")
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hf_token)

+ # Story genres with genre-specific example prompts
  GENRE_EXAMPLES = {
+     "fairy tale": [
+         "I follow the shimmer of fairy dust into a hidden forest",
+         "A tiny dragon appears at my window, asking for help to find its mother",
+         "A friendly witch invites me into her cozy cottage, offering a warm cup of tea"
+     ],
+     "fantasy": [
+         "I enter the ancient forest seeking the wizard's tower",
+         "I approach the dragon cautiously with my shield raised",
+         "I try to bargain with the elven council for safe passage"
+     ],
+     "sci-fi": [
+         "I investigate the strange signal coming from the abandoned planet",
+         "I negotiate with the alien ambassador about the peace treaty",
+         "I try to repair my damaged spacecraft before oxygen runs out"
+     ],
+     "mystery": [
+         "I examine the crime scene for overlooked evidence",
+         "I question the nervous butler about the night of the murder",
+         "I follow the suspicious figure through the foggy streets"
      ],
+     "horror": [
+         "I read the forbidden text while the candles flicker",
+         "I hide under the bed as footsteps approach",
+         "I investigate the strange noises coming from the attic"
      ],
+     "western": [
+         "I challenge the outlaw to a duel at high noon",
+         "I track the bandits through the desert canyon",
+         "I enter the saloon looking for information"
      ],
+     "cyberpunk": [
+         "I jack into the corporate mainframe to steal data",
+         "I hide in the neon-lit alleyway from corporate security",
+         "I meet my mysterious client in the underground bar"
+     ],
+     "historical": [
+         "I join the resistance against the occupying forces",
+         "I navigate the dangerous politics of the royal court",
+         "I set sail on a voyage to discover new lands"
+     ],
+     "post-apocalyptic": [
+         "I scavenge the abandoned shopping mall for supplies",
+         "I navigate through the radioactive zone using my old map",
+         "I hide from the approaching group of raiders"
+     ],
+     "steampunk": [
+         "I pilot my airship through the lightning storm",
+         "I present my new invention to the Royal Academy",
+         "I sneak aboard the emperor's armored train"
      ]
  }

+ # Add constants at the top for magic numbers
+ MAX_HISTORY_LENGTH = 20
+ MEMORY_WINDOW = 5  # Reduced from 10 to limit context
+ MAX_TOKENS = 1024  # Reduced from 2048 for faster responses
+ TEMPERATURE = 0.7  # Slightly reduced for faster convergence
+ TOP_P = 0.95
+ MIN_RESPONSE_LENGTH = 100  # Reduced from 200 for quicker display
+
81
+ def get_examples_for_genre(genre):
82
+ """Get example prompts specific to the selected genre"""
83
+ return GENRE_EXAMPLES.get(genre, GENRE_EXAMPLES["fantasy"])
84
+
85
+ def get_enhanced_system_prompt(genre=None):
86
+ """Generate a detailed system prompt with optional genre specification"""
87
+ selected_genre = genre or "fantasy"
88
+ system_message = f"""You are an interactive storyteller creating an immersive {selected_genre} choose-your-own-adventure story.
89
+ For each response you MUST:
90
+ 1. Write 100-200 words describing the scene, using vivid sensory details
91
+ 2. Always use second-person perspective ("you", "your") to maintain reader immersion
92
+ 3. Include dialogue or your character's thoughts that reveal personality and motivations
93
+ 4. Create a strong sense of atmosphere appropriate for {selected_genre}
94
+ 5. End with EXACTLY THREE numbered choices and NOTHING ELSE AFTER THEM:
95
+ 1. [Complete sentence in second-person starting with a verb]
96
+ 2. [Complete sentence in second-person starting with a verb]
97
+ 3. [Complete sentence in second-person starting with a verb]
98
+ CRITICAL RULES:
99
+ - Provide only ONE set of three choices at the very end of your response
100
+ - Never continue the story after giving choices
101
+ - Never provide additional choices
102
+ - Keep all narrative before the choices
103
+ - End every response with exactly three numbered options
104
+ - Each choice must start with "You" followed by a verb
105
+ Remember: The story continues ONLY when the player makes a choice."""
106
+ return system_message
107
+
108
+ def create_story_summary(chat_history):
109
+ """Create a concise summary of the story so far if the history gets too long"""
110
+ if len(chat_history) <= 2:
111
+ return None
112
+
113
+ story_text = ""
114
+ for i in range(0, len(chat_history), 2):
115
+ if i+1 < len(chat_history):
116
+ story_text += f"User: {chat_history[i]}\nStory: {chat_history[i+1]}\n\n"
117
+
118
+ summary_instruction = {
119
+ "role": "system",
120
+ "content": "The conversation history is getting long. Please create a brief summary of the key plot points and character development so far to help maintain context without exceeding token limits."
121
+ }
122
+ return summary_instruction
123
+
124
+ # Modified function for proper Gradio format (lists)
125
+ def respond(message: str, chat_history: List[Tuple[str, str]], genre: Optional[str] = None, use_full_memory: bool = True) -> Tuple[str, List[Tuple[str, str]]]:
126
+ """Generate a response based on the current message and conversation history."""
127
+ if not message.strip():
128
+ return "", chat_history
129
+
130
+ try:
131
+ # Start with system prompt
132
+ api_messages = [{"role": "system", "content": get_enhanced_system_prompt(genre)}]
133
+ logging.debug(f"System Message: {api_messages[0]}")
134
 
135
+ # Add chat history - convert from tuples to API format
136
+ if chat_history and use_full_memory:
137
+ for user_msg, bot_msg in chat_history[-MEMORY_WINDOW:]:
138
+ api_messages.extend([
139
+ {"role": "user", "content": str(user_msg)},
140
+ {"role": "assistant", "content": str(bot_msg)}
141
+ ])
142
+ logging.debug(f"Chat History Messages: {api_messages[1:]}")
143
 
144
+ # Add current message
145
+ api_messages.append({"role": "user", "content": str(message)})
146
+ logging.debug(f"Final Message List: {api_messages}")
147
 
148
+ # Make API call without timeout
149
+ logging.debug("Making API call...")
150
+ response = client.chat_completion(
151
+ messages=api_messages,
152
+ max_tokens=MAX_TOKENS,
153
+ temperature=TEMPERATURE,
154
+ top_p=TOP_P
155
  )
156
+ logging.debug("API call completed")
157
+
158
+ # Extract response
159
+ bot_message = response.choices[0].message.content
160
+ logging.debug(f"Bot Response: {bot_message[:100]}...")
161
+
162
+ # Update history using tuple format [(user_msg, bot_msg), ...]
163
+ updated_history = list(chat_history) # Create a copy
164
+ updated_history.append((message, bot_message)) # Add as tuple
165
+ return "", updated_history
166
+
167
+ except Exception as e:
168
+ logging.error("Error in respond function", exc_info=True)
169
+ error_msg = f"Story magic temporarily interrupted. Please try again. (Error: {str(e)})"
170
+ return "", list(chat_history) + [(message, error_msg)]
171
+
172
+ def save_story(chat_history):
173
+ """Convert chat history to markdown and return as downloadable file"""
174
+ if not chat_history:
175
+ return gr.File.update(value=None)
176
+
177
+ try:
178
+ story_text = "# My Adventure\n\n"
179
+ for user_msg, bot_msg in chat_history:
180
+ story_text += f"**Player:** {user_msg}\n\n"
181
+ story_text += f"**Story:** {bot_msg}\n\n---\n\n"
182
+
183
+ # Create temporary file
184
+ temp_file = "my_story.md"
185
+ with open(temp_file, "w", encoding="utf-8") as f:
186
+ f.write(story_text)
187
+
188
+ return temp_file
189
+
190
+ except Exception as e:
191
+ logging.error(f"Error saving story: {e}")
192
+ return gr.File.update(value=None)
193
+
194
+ # Add this function to get a custom avatar image URL
195
+ def get_storyteller_avatar_url():
196
+ """Get a URL for the storyteller avatar from a free image service"""
197
+ # Using an external wizard avatar image
198
+ return "https://api.dicebear.com/7.x/bottts/svg?seed=wizard&backgroundColor=b6e3f4&eyes=bulging"
199
+
200
+ with gr.Blocks(theme=gr.themes.Soft()) as demo:
201
+ # Header section with improved instructions
202
+ gr.Markdown("""
203
+ # 🔮 AI Story Studio
204
+ **Collaborate with AI to craft your own adventure, one scene at a time.**
205
+ Pick a genre, start with a prompt or write your own, and guide the story with your choices.
206
+
207
+ > **Tip:** The more detail you provide, the deeper the story becomes.
208
+ """)
209
+
210
+ wizard_avatar = get_storyteller_avatar_url()
211
+
212
+ with gr.Row():
213
+ with gr.Column(scale=3):
214
+ # Chat window + user input - USING LIST FORMAT
215
+ chatbot = gr.Chatbot(
216
+ height=500,
217
+ bubble_full_width=True,
218
+ show_copy_button=True,
219
+ avatar_images=(None, wizard_avatar),
220
+ container=True,
221
+ scale=1,
222
+ min_width=800,
223
+ value=[], # Empty list for messages
224
+ render=True
225
+ )
226
+ msg = gr.Textbox(
227
+ placeholder="Describe your next move...",
228
+ container=False,
229
+ scale=4,
230
+ )
231
+
232
+ with gr.Row():
233
+ submit = gr.Button("Continue Story", variant="primary")
234
+ clear = gr.Button("Start New Adventure")
235
+
236
+ with gr.Column(scale=1):
237
+ gr.Markdown("## Adventure Settings")
238
+ genre = gr.Dropdown(
239
+ choices=list(GENRE_EXAMPLES.keys()),
240
+ label="Story Genre",
241
+ info="Choose the theme of your next adventure",
242
+ value="fantasy"
243
+ )
244
+             full_memory = gr.Checkbox(
+                 label="Full Story Memory",
+                 value=True,
+                 info="When enabled, the AI keeps your recent story exchanges as context. If disabled, each scene is written from your latest message alone."
+             )
+
+             gr.Markdown("## Story Starters")
+
+             # Create four placeholder buttons for story starters
+             starter_btn1 = gr.Button("Starter 1")
+             starter_btn2 = gr.Button("Starter 2")
+             starter_btn3 = gr.Button("Starter 3")
+             starter_btn4 = gr.Button("Starter 4")
+             starter_buttons = [starter_btn1, starter_btn2, starter_btn3, starter_btn4]
+
+     # Simplified update function
+     def update_starter_buttons(selected_genre):
+         examples = get_examples_for_genre(selected_genre)
+         results = []
+         for i in range(4):
+             if i < len(examples):
+                 results.append(examples[i])
+             else:
+                 results.append("")
+         return tuple(results)

+     # New direct handler for starter clicks
+     def use_starter(starter_text: str, history: List[Tuple[str, str]], selected_genre: str, memory_flag: bool) -> Tuple[str, List[Tuple[str, str]]]:
+         """Handle starter button clicks with proper message formatting"""
+         if not starter_text:
+             return "", history
+
+         try:
+             # Use the respond function for consistent handling
+             _, updated_history = respond(
+                 message=starter_text,
+                 chat_history=history,
+                 genre=selected_genre,
+                 use_full_memory=memory_flag
+             )
+             return "", updated_history
+
+         except Exception as e:
+             error_msg = f"Story magic temporarily interrupted. Please try again. (Error: {str(e)})"
+             return "", list(history) + [(starter_text, error_msg)]

+     # Simplified button connections
+     for starter_button in starter_buttons:
+         starter_button.click(
+             fn=use_starter,
+             inputs=[starter_button, chatbot, genre, full_memory],
+             outputs=[msg, chatbot],
+             queue=True
+         )
+
+     # Update buttons when genre changes
+     genre.change(
+         fn=update_starter_buttons,
+         inputs=[genre],
+         outputs=starter_buttons
+     )
+
+     # Handler for user input
+     msg.submit(
+         fn=respond,
+         inputs=[msg, chatbot, genre, full_memory],
+         outputs=[msg, chatbot]
+     )
+     submit.click(
+         fn=respond,
+         inputs=[msg, chatbot, genre, full_memory],
+         outputs=[msg, chatbot]
+     )
+
+     # Clear the chatbot for a new adventure
+     clear.click(lambda: [], None, chatbot, queue=False)
+     clear.click(lambda: "", None, msg, queue=False)
+
+     # "Download My Story" row
      with gr.Row():
+         save_btn = gr.Button("Download My Story", variant="secondary")
+         story_output = gr.File(
+             label="Download your story",
+             file_count="single",
+             file_types=[".md"],
+             interactive=False,
+             visible=True
+         )
+
+     save_btn.click(
+         fn=save_story,
+         inputs=[chatbot],
+         outputs=story_output,
+         queue=False  # Process immediately
+     )

+     # Initialize buttons with default fantasy genre examples
+     initial_examples = get_examples_for_genre("fantasy")
+     initial_button_data = tuple(
+         initial_examples[i] if i < len(initial_examples) else ""
+         for i in range(4)
+     )
+
+     # Update button text on page load
+     demo.load(
+         fn=lambda: initial_button_data,
+         outputs=starter_buttons,
+         queue=False
+     )

+ # Run the app
  if __name__ == "__main__":
+     demo.launch(server_name="0.0.0.0", server_port=7860)
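
For reference, here is a minimal sketch (not part of this commit) of how the new `respond()` contract and its tuple-based chat history can be exercised outside the Gradio UI. It assumes the file above is importable as `app` and that `HF_TOKEN` is set so the `InferenceClient` call can reach the hosted zephyr-7b-beta model.

```python
# Hypothetical smoke test for respond(); the `app` module name and the
# environment setup (HF_TOKEN, network access) are assumptions, not part
# of the commit itself.
from app import respond

history = []  # gr.Chatbot history: a list of (user_message, bot_message) tuples
_, history = respond(
    message="I enter the ancient forest seeking the wizard's tower",
    chat_history=history,
    genre="fantasy",
    use_full_memory=True,
)
print(history[-1][1])  # latest story beat, ending with three numbered choices
```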