PeterPinetree committed on
Commit ba42e04 · verified · 1 parent: dfedc4a

Create app.py

Files changed (1)
  1. app.py +380 -0
app.py ADDED
@@ -0,0 +1,380 @@
import gradio as gr
import os
from huggingface_hub import InferenceClient, __version__ as hf_version
import random
from typing import Generator, Dict, List, Tuple, Optional
import logging  # Added logging for better debugging

# Configure logging with DEBUG level and add version info
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logging.debug(f"Using huggingface_hub version: {hf_version}")

# Get token from environment variable
hf_token = os.environ.get("HF_TOKEN")
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hf_token)

# Story genres with genre-specific example prompts
GENRE_EXAMPLES = {
    "fairy tale": [
        "I follow the shimmer of fairy dust into a hidden forest",
        "A tiny dragon appears at my window, asking for help to find its mother",
        "A friendly witch invites me into her cozy cottage, offering a warm cup of tea"
    ],
    "fantasy": [
        "I enter the ancient forest seeking the wizard's tower",
        "I approach the dragon cautiously with my shield raised",
        "I try to bargain with the elven council for safe passage"
    ],
    "sci-fi": [
        "I investigate the strange signal coming from the abandoned planet",
        "I negotiate with the alien ambassador about the peace treaty",
        "I try to repair my damaged spacecraft before oxygen runs out"
    ],
    "mystery": [
        "I examine the crime scene for overlooked evidence",
        "I question the nervous butler about the night of the murder",
        "I follow the suspicious figure through the foggy streets"
    ],
    "horror": [
        "I read the forbidden text while the candles flicker",
        "I hide under the bed as footsteps approach",
        "I investigate the strange noises coming from the attic"
    ],
    "western": [
        "I challenge the outlaw to a duel at high noon",
        "I track the bandits through the desert canyon",
        "I enter the saloon looking for information"
    ],
    "cyberpunk": [
        "I jack into the corporate mainframe to steal data",
        "I hide in the neon-lit alleyway from corporate security",
        "I meet my mysterious client in the underground bar"
    ],
    "historical": [
        "I join the resistance against the occupying forces",
        "I navigate the dangerous politics of the royal court",
        "I set sail on a voyage to discover new lands"
    ],
    "post-apocalyptic": [
        "I scavenge the abandoned shopping mall for supplies",
        "I navigate through the radioactive zone using my old map",
        "I hide from the approaching group of raiders"
    ],
    "steampunk": [
        "I pilot my airship through the lightning storm",
        "I present my new invention to the Royal Academy",
        "I sneak aboard the emperor's armored train"
    ]
}

# Constants for generation and history handling
MAX_HISTORY_LENGTH = 20
MEMORY_WINDOW = 5  # Reduced from 10 to limit context
MAX_TOKENS = 1024  # Reduced from 2048 for faster responses
TEMPERATURE = 0.7  # Slightly reduced for faster convergence
TOP_P = 0.95
MIN_RESPONSE_LENGTH = 100  # Reduced from 200 for quicker display

def get_examples_for_genre(genre):
    """Get example prompts specific to the selected genre"""
    return GENRE_EXAMPLES.get(genre, GENRE_EXAMPLES["fantasy"])

def get_enhanced_system_prompt(genre=None):
    """Generate a detailed system prompt with optional genre specification"""
    selected_genre = genre or "fantasy"
    system_message = f"""You are an interactive storyteller creating an immersive {selected_genre} choose-your-own-adventure story.
For each response you MUST:
1. Write 100-200 words describing the scene, using vivid sensory details
2. Always use second-person perspective ("you", "your") to maintain reader immersion
3. Include dialogue or your character's thoughts that reveal personality and motivations
4. Create a strong sense of atmosphere appropriate for {selected_genre}
5. End with EXACTLY THREE numbered choices and NOTHING ELSE AFTER THEM:
1. [Complete sentence in second-person starting with a verb]
2. [Complete sentence in second-person starting with a verb]
3. [Complete sentence in second-person starting with a verb]
CRITICAL RULES:
- Provide only ONE set of three choices at the very end of your response
- Never continue the story after giving choices
- Never provide additional choices
- Keep all narrative before the choices
- End every response with exactly three numbered options
- Each choice must start with "You" followed by a verb
Remember: The story continues ONLY when the player makes a choice."""
    return system_message

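# For reference, a reply that satisfies the rules above ends like this
# (illustrative shape only, not generated output):
#
#     ...the torchlight gutters as you step into the ruined hall...
#     1. You draw your sword and advance toward the dais
#     2. You call out a greeting into the darkness
#     3. You retreat quietly and circle around the keep
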
def create_story_summary(chat_history):
    """Create a concise summary of the story so far if the history gets too long"""
    if len(chat_history) <= 2:
        return None

    # chat_history is a list of (user_msg, bot_msg) tuples, matching the rest of this file
    story_text = ""
    for user_msg, bot_msg in chat_history:
        story_text += f"User: {user_msg}\nStory: {bot_msg}\n\n"

    # story_text is assembled here but not yet folded into the instruction below
    summary_instruction = {
        "role": "system",
        "content": "The conversation history is getting long. Please create a brief summary of the key plot points and character development so far to help maintain context without exceeding token limits."
    }
    return summary_instruction

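# NOTE: create_story_summary() and MAX_HISTORY_LENGTH are defined above but are not
# used anywhere else in this file. A hypothetical way to wire them into respond()
# (an illustrative sketch, not part of this commit) would be to add, just before
# the chat_completion call:
#
#     if len(chat_history) > MAX_HISTORY_LENGTH:
#         summary_instruction = create_story_summary(chat_history)
#         if summary_instruction:
#             api_messages.append(summary_instruction)
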
# Generate a story continuation; history uses the (user, bot) tuple format expected by gr.Chatbot
def respond(message: str, chat_history: List[Tuple[str, str]], genre: Optional[str] = None, use_full_memory: bool = True) -> Tuple[str, List[Tuple[str, str]]]:
    """Generate a response based on the current message and conversation history."""
    if not message.strip():
        return "", chat_history

    try:
        # Start with system prompt
        api_messages = [{"role": "system", "content": get_enhanced_system_prompt(genre)}]
        logging.debug(f"System Message: {api_messages[0]}")

        # Add chat history, converting (user, bot) tuples to the chat-completion format.
        # Even with "full memory" enabled, only the last MEMORY_WINDOW exchanges are sent
        # to keep the request within the model's context limit.
        if chat_history and use_full_memory:
            for user_msg, bot_msg in chat_history[-MEMORY_WINDOW:]:
                api_messages.extend([
                    {"role": "user", "content": str(user_msg)},
                    {"role": "assistant", "content": str(bot_msg)}
                ])
            logging.debug(f"Chat History Messages: {api_messages[1:]}")

        # Add current message
        api_messages.append({"role": "user", "content": str(message)})
        logging.debug(f"Final Message List: {api_messages}")

        # Make API call (no explicit timeout)
        logging.debug("Making API call...")
        response = client.chat_completion(
            messages=api_messages,
            max_tokens=MAX_TOKENS,
            temperature=TEMPERATURE,
            top_p=TOP_P
        )
        logging.debug("API call completed")

        # Extract response
        bot_message = response.choices[0].message.content
        logging.debug(f"Bot Response: {bot_message[:100]}...")

        # Update history using tuple format [(user_msg, bot_msg), ...]
        updated_history = list(chat_history)  # Work on a copy
        updated_history.append((message, bot_message))  # Add as tuple
        return "", updated_history

    except Exception as e:
        logging.error("Error in respond function", exc_info=True)
        error_msg = f"Story magic temporarily interrupted. Please try again. (Error: {str(e)})"
        return "", list(chat_history) + [(message, error_msg)]

def save_story(chat_history):
    """Convert chat history to markdown and return the file path for download"""
    if not chat_history:
        # Returning None leaves the File output empty without relying on the
        # version-specific gr.File.update() API
        return None

    try:
        story_text = "# My Adventure\n\n"
        for user_msg, bot_msg in chat_history:
            story_text += f"**Player:** {user_msg}\n\n"
            story_text += f"**Story:** {bot_msg}\n\n---\n\n"

        # Write to a temporary markdown file in the working directory
        temp_file = "my_story.md"
        with open(temp_file, "w", encoding="utf-8") as f:
            f.write(story_text)

        return temp_file

    except Exception as e:
        logging.error(f"Error saving story: {e}")
        return None

# Avatar image for the storyteller side of the chat
def get_storyteller_avatar_url():
    """Get a URL for the storyteller avatar from a free image service"""
    # External wizard avatar generated by DiceBear
    return "https://api.dicebear.com/7.x/bottts/svg?seed=wizard&backgroundColor=b6e3f4&eyes=bulging"

# Custom CSS for the compact file output and starter buttons used below
custom_css = """
.compact-file-output > div {
    min-height: 0 !important;
    padding: 0 !important;
}
.compact-file-output .file-preview {
    margin: 0 !important;
    display: flex;
    align-items: center;
}
.compact-btn {
    padding: 0.5rem !important;
    min-height: 0 !important;
    height: auto !important;
    line-height: 1.2 !important;
}
"""

with gr.Blocks(theme=gr.themes.Soft(), css=custom_css) as demo:
    # Header section with instructions
    gr.Markdown("""
# 🔮 AI Story Studio
**Collaborate with AI to craft your own adventure, one scene at a time.**
Pick a genre, start with a prompt or write your own, and guide the story with your choices.

> **Tip:** The more detail you provide, the deeper the story becomes.
""")

    wizard_avatar = get_storyteller_avatar_url()

    with gr.Row():
        with gr.Column(scale=3):
            # Chat window + user input, using the tuple-based history format
            chatbot = gr.Chatbot(
                height=400,
                bubble_full_width=True,
                show_copy_button=True,
                avatar_images=(None, wizard_avatar),
                container=True,
                scale=1,
                min_width=800,
                value=[],  # Start with an empty history
                render=True
            )
            msg = gr.Textbox(
                placeholder="Describe your next move...",
                container=False,
                scale=4,
            )

            with gr.Row():
                submit = gr.Button("Continue Story", variant="primary")
                clear = gr.Button("Start New Adventure")

        with gr.Column(scale=1):
            gr.Markdown("## Adventure Settings")
            genre = gr.Dropdown(
                choices=list(GENRE_EXAMPLES.keys()),
                label="Story Genre",
                info="Choose the theme of your next adventure",
                value="fantasy"
            )
            full_memory = gr.Checkbox(
                label="Full Story Memory",
                value=True,
                info="When enabled, the last several exchanges are sent back to the model as context; when disabled, each turn is sent without prior history."
            )

            gr.Markdown("## Story Starters")

            # Three placeholder buttons for story starters; labels are filled in per genre
            starter_btn1 = gr.Button("Starter 1", scale=1, min_width=250, elem_classes="compact-btn")
            starter_btn2 = gr.Button("Starter 2", scale=1, min_width=250, elem_classes="compact-btn")
            starter_btn3 = gr.Button("Starter 3", scale=1, min_width=250, elem_classes="compact-btn")
            starter_buttons = [starter_btn1, starter_btn2, starter_btn3]

    # Fill the starter-button labels with examples for the selected genre
    def update_starter_buttons(selected_genre):
        examples = get_examples_for_genre(selected_genre)
        results = []
        for i in range(3):
            if i < len(examples):
                results.append(examples[i])
            else:
                results.append("")
        return tuple(results)

    # Direct handler for starter clicks
    def use_starter(starter_text: str, history: List[Tuple[str, str]], selected_genre: str, memory_flag: bool) -> Tuple[str, List[Tuple[str, str]]]:
        """Handle starter button clicks with proper message formatting"""
        if not starter_text:
            return "", history

        try:
            # Reuse the respond function for consistent handling
            _, updated_history = respond(
                message=starter_text,
                chat_history=history,
                genre=selected_genre,
                use_full_memory=memory_flag
            )
            return "", updated_history

        except Exception as e:
            error_msg = f"Story magic temporarily interrupted. Please try again. (Error: {str(e)})"
            return "", list(history) + [(starter_text, error_msg)]

    # Wire each starter button; its label text is passed in as the player's message
    for starter_button in starter_buttons:
        starter_button.click(
            fn=use_starter,
            inputs=[starter_button, chatbot, genre, full_memory],
            outputs=[msg, chatbot],
            queue=True
        )

    # Update the starter buttons when the genre changes
    genre.change(
        fn=update_starter_buttons,
        inputs=[genre],
        outputs=starter_buttons
    )

    # Handlers for user input
    msg.submit(
        fn=respond,
        inputs=[msg, chatbot, genre, full_memory],
        outputs=[msg, chatbot]
    )
    submit.click(
        fn=respond,
        inputs=[msg, chatbot, genre, full_memory],
        outputs=[msg, chatbot]
    )

    # Clear the chatbot and input box for a new adventure
    clear.click(lambda: [], None, chatbot, queue=False)
    clear.click(lambda: "", None, msg, queue=False)

    # "Download My Story" row
    with gr.Row(equal_height=True):  # Force equal height for all children
        # Column for the button, to control its width
        with gr.Column(scale=4):
            save_btn = gr.Button("Download My Story", variant="secondary", size="lg")

        # Column for the file output, with matching height
        with gr.Column(scale=1):
            story_output = gr.File(
                label=None,  # No label, to avoid extra height
                file_count="single",
                file_types=[".md"],
                interactive=False,
                visible=True,
                elem_classes="compact-file-output"  # Hook for the custom CSS above
            )

    # Connect the save button to the save_story function
    save_btn.click(
        fn=save_story,
        inputs=[chatbot],
        outputs=story_output,
        queue=False  # Process immediately
    )

    # Initialize buttons with default fantasy genre examples
    initial_examples = get_examples_for_genre("fantasy")
    initial_button_data = tuple(
        initial_examples[i] if i < len(initial_examples) else ""
        for i in range(3)
    )

    # Update button text on page load
    demo.load(
        fn=lambda: initial_button_data,
        outputs=starter_buttons,
        queue=False
    )

# Run the app
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)
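
For a quick sanity check outside the web UI, a minimal smoke test could look like the sketch below (assumptions: the file above is saved as app.py in the working directory, HF_TOKEN is set, and the zephyr-7b-beta inference endpoint is reachable; `smoke_test.py` is a hypothetical helper, not part of this commit — the call simply mirrors the respond() signature defined above):

# smoke_test.py
from app import respond

_, history = respond(
    "I enter the ancient forest seeking the wizard's tower",  # opening move
    [],                    # empty chat history
    genre="fantasy",
    use_full_memory=True,
)
print(history[-1][1])      # the first scene plus its three numbered choices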