File size: 12,519 Bytes
92ddc2e
 
d1b54ac
92ddc2e
d1b54ac
92ddc2e
 
c4ad27f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d1b54ac
 
 
c4ad27f
d1b54ac
 
 
 
 
 
 
 
 
 
 
 
 
 
3f0c27c
d1b54ac
 
4b95bd0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d1b54ac
c4ad27f
d1b54ac
 
c4ad27f
 
4b95bd0
 
 
c4ad27f
 
e6a0ebd
3f0c27c
4b95bd0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3f0c27c
c4ad27f
e6a0ebd
3f0c27c
d1b54ac
4b95bd0
c4ad27f
e6a0ebd
d1b54ac
c4ad27f
d1b54ac
 
c4ad27f
 
d1b54ac
c4ad27f
e6a0ebd
d1b54ac
 
 
 
 
c4ad27f
 
 
4b95bd0
 
 
 
d1b54ac
c4ad27f
4b95bd0
d1b54ac
5616d3e
 
 
 
 
 
 
 
 
 
 
c4ad27f
 
d1b54ac
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c4ad27f
d1b54ac
 
 
 
 
4b95bd0
 
 
 
 
 
 
5616d3e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d1b54ac
5616d3e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d1b54ac
5616d3e
 
 
 
06d02d3
5616d3e
c4ad27f
5616d3e
c4ad27f
5616d3e
c4ad27f
 
 
4b95bd0
 
f010e45
c4ad27f
f010e45
d1b54ac
4b95bd0
 
 
 
 
 
 
 
92ddc2e
 
d1b54ac
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
import gradio as gr
from huggingface_hub import InferenceClient
import random

# Initialize the inference client for the story-generation model.
# NOTE(review): this opens a connection to the Hugging Face Inference API at
# import time; network access (and possibly an HF auth token in the
# environment) is required — confirm deployment environment provides it.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

# Story genres with genre-specific example prompts.
# Maps genre key (lowercase) -> list of example player actions; each genre
# currently provides exactly 4 prompts, and the UI displays at most 4
# example buttons, so keep new genres to 4 entries as well.
GENRE_EXAMPLES = {
    "fantasy": [
        "I enter the ancient forest seeking the wizard's tower",
        "I approach the dragon cautiously with my shield raised",
        "I examine the mysterious runes carved into the stone altar",
        "I try to bargain with the elven council for safe passage"
    ],
    "sci-fi": [
        "I hack into the space station's mainframe",
        "I investigate the strange signal coming from the abandoned planet",
        "I negotiate with the alien ambassador about the peace treaty",
        "I try to repair my damaged spacecraft before oxygen runs out"
    ],
    "mystery": [
        "I examine the crime scene for overlooked evidence",
        "I question the nervous butler about the night of the murder",
        "I follow the suspicious figure through the foggy streets",
        "I check the victim's diary for hidden clues"
    ],
    "horror": [
        "I slowly open the creaking door to the basement",
        "I read the forbidden text while the candles flicker",
        "I hide under the bed as footsteps approach",
        "I investigate the strange noises coming from the attic"
    ],
    "western": [
        "I challenge the outlaw to a duel at high noon",
        "I track the bandits through the desert canyon",
        "I enter the saloon looking for information",
        "I defend the stagecoach from the approaching raiders"
    ],
    "cyberpunk": [
        "I jack into the corporate mainframe to steal data",
        "I negotiate with the street gang for cybernetic upgrades",
        "I hide in the neon-lit alleyway from corporate security",
        "I meet my mysterious client in the underground bar"
    ],
    "historical": [
        "I attend the royal ball hoping to meet the mysterious count",
        "I join the resistance against the occupying forces",
        "I navigate the dangerous politics of the royal court",
        "I set sail on a voyage to discover new lands"
    ],
    "post-apocalyptic": [
        "I scavenge the abandoned shopping mall for supplies",
        "I approach the fortified settlement seeking shelter",
        "I navigate through the radioactive zone using my old map",
        "I hide from the approaching group of raiders"
    ],
    "steampunk": [
        "I pilot my airship through the lightning storm",
        "I present my new invention to the Royal Academy",
        "I investigate the mysterious clockwork automaton",
        "I sneak aboard the emperor's armored train"
    ]
}

def get_examples_for_genre(genre):
    """Return the example prompts for the selected genre.

    Falls back to the "fantasy" examples when *genre* is not a known key
    (including None), so the UI always has prompts to display.
    """
    # dict.get with a default replaces the manual membership check.
    return GENRE_EXAMPLES.get(genre, GENRE_EXAMPLES["fantasy"])

def get_enhanced_system_prompt(genre=None):
    """Build the storyteller system prompt for the given genre.

    Any falsy *genre* (None, empty string) falls back to "fantasy".
    """
    chosen_genre = genre or "fantasy"

    return f"""You are an interactive storyteller creating an immersive {chosen_genre} choose-your-own-adventure story.
For each response:
1. Provide vivid sensory descriptions of the scene, environment, and characters
2. Include meaningful dialogue or internal monologue that reveals character motivations
3. End with exactly 3 different possible actions or decisions, each offering a distinct path
4. Maintain consistent world-building and character development
5. Incorporate appropriate atmosphere and tone for a {chosen_genre} setting
6. Remember previous choices to create a coherent narrative arc
Format your three options as:
- Option 1: [Complete sentence describing a possible action]
- Option 2: [Complete sentence describing a possible action]
- Option 3: [Complete sentence describing a possible action]
Keep responses engaging but concise (200-300 words maximum). If the user's input doesn't clearly indicate a choice, interpret their intent and move the story forward in the most logical direction."""

def create_story_summary(chat_history):
    """Return a system message asking the model to summarize the story so far.

    Returns None when *chat_history* holds two or fewer exchanges — not
    enough material to be worth summarizing.
    """
    if len(chat_history) <= 2:  # Not enough history to summarize
        return None

    # NOTE: the history text itself is deliberately not embedded here; the
    # request relies on the model seeing the preceding messages in context.
    # (A previous version built a transcript string and never used it.)
    return {
        "role": "system",
        "content": "The conversation history is getting long. Please create a brief summary of the key plot points and character development so far to help maintain context without exceeding token limits."
    }

def respond(message, chat_history, genre=None, use_full_memory=True):
    """Stream story continuations as updated chat histories.

    Generator used as a Gradio event handler: yields a fresh copy of
    *chat_history* with the in-progress (message, partial_reply) pair
    appended after each streamed token, so the chatbot updates live.

    Args:
        message: the player's latest input.
        chat_history: list of (user_msg, bot_msg) tuples (Gradio tuple format).
        genre: optional genre key passed to the system prompt builder.
        use_full_memory: when True, send as much history as fits; when
            False, send only a bounded tail of the conversation.
    """
    # Use a more detailed system prompt
    system_message = get_enhanced_system_prompt(genre)
    
    # Format history correctly for the API
    formatted_history = []
    for user_msg, bot_msg in chat_history:
        formatted_history.append({"role": "user", "content": user_msg})
        formatted_history.append({"role": "assistant", "content": bot_msg})
    
    # Create proper API messages
    api_messages = [{"role": "system", "content": system_message}]
    
    # Token management strategy
    # Option 1: Use full history but potentially hit token limits
    if use_full_memory and formatted_history:
        # If there's a lot of history, we might need a summary
        if len(formatted_history) > 20:  # Arbitrary threshold, adjust as needed
            # Summarize everything except the 5 most recent exchanges...
            summary_instruction = create_story_summary(chat_history[:len(chat_history)-5])
            if summary_instruction:
                api_messages.append(summary_instruction)
            
            # Add only the most recent exchanges after the summary
            # (last 10 formatted messages = last 5 user/assistant exchanges)
            for msg in formatted_history[-10:]:
                api_messages.append(msg)
        else:
            # Add all history if it's not too long
            for msg in formatted_history:
                api_messages.append(msg)
    # Option 2: Limited history - fallback if full memory is disabled
    else:
        # Set a larger memory length but still have a limit
        memory_length = 10  # Increased from 5
        if formatted_history:
            # *2 because each exchange contributes a user and an assistant message
            for msg in formatted_history[-memory_length*2:]:
                api_messages.append(msg)
    
    # Add current message
    api_messages.append({"role": "user", "content": message})
    
    # Special handling for story initialization
    if not chat_history or message.lower() in ["start", "begin", "begin my adventure"]:
        # Add a specific instruction for starting a new story
        # NOTE(review): this system message is appended AFTER the user turn;
        # most chat APIs accept that, but verify the model honors trailing
        # system instructions.
        api_messages.append({
            "role": "system", 
            "content": f"Begin a new {genre or 'fantasy'} adventure with an intriguing opening scene. Introduce the protagonist without assuming too much about them, allowing the user to shape the character."
        })
    
    # Generate and stream response
    bot_message = ""
    try:
        for response in client.chat_completion(
            api_messages,
            max_tokens=512,
            stream=True,
            temperature=0.7,
            top_p=0.95,
        ):
            delta = response.choices[0].delta.content
            if delta:
                bot_message += delta
                # Create a new list for the updated chat history
                # (Gradio needs a new object to detect the change)
                new_history = chat_history.copy()
                new_history.append((message, bot_message))
                yield new_history
    except Exception as e:
        # Surface API/network failures to the player instead of crashing the UI.
        error_message = f"Story magic temporarily interrupted. Please try again. (Error: {str(e)})"
        yield chat_history + [(message, error_message)]

def save_story(chat_history):
    """Render the chat history as a markdown transcript for download.

    Returns a placeholder message when there is no history yet.
    """
    if not chat_history:
        return "No story to save yet!"

    parts = ["# My Interactive Adventure\n\n"]
    for player_line, story_line in chat_history:
        parts.append(f"**Player:** {player_line}\n\n")
        parts.append(f"**Story:** {story_line}\n\n---\n\n")
    return "".join(parts)

# Create interface with additional customization options.
# Top-level Gradio UI wiring: layout, event handlers, and app launch.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🔮 Interactive Story Adventure")
    gr.Markdown("Immerse yourself in an interactive story where your choices shape the narrative.")
    
    with gr.Row():
        # Left column: chat window and input controls.
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(
                height=500,
                bubble_full_width=False,
                show_copy_button=True,
                avatar_images=(None, "🧙"),
            )
            msg = gr.Textbox(
                placeholder="Describe what you want to do next in the story...",
                container=False,
                scale=4,
            )
            
            with gr.Row():
                submit = gr.Button("Continue Story", variant="primary")
                clear = gr.Button("Start New Adventure")
                
        # Right column: genre/memory settings and story-starter buttons.
        with gr.Column(scale=1):
            gr.Markdown("## Adventure Settings")
            genre = gr.Dropdown(
                choices=list(GENRE_EXAMPLES.keys()),
                label="Story Genre",
                info="Select a genre for your next adventure",
                value="fantasy"
            )
            
            # Add a memory toggle option
            full_memory = gr.Checkbox(
                label="Full Story Memory",
                value=True,
                info="When enabled, the AI will try to remember the entire story"
            )
            
            # Create story starter buttons that automatically submit
            gr.Markdown("## Story Starters")
            # NOTE(review): story_starters is populated nowhere — appears vestigial.
            story_starters = []
            
            def create_example_click_handler(example_text):
                # Factory that binds example_text now, avoiding the classic
                # late-binding-closure bug when created inside the loop below.
                def example_click():
                    return example_text
                return example_click
            
            # Create a start button: first fills the textbox, then triggers
            # the story generation with that text.
            start_btn = gr.Button("Begin my adventure", variant="secondary")
            start_btn.click(
                fn=create_example_click_handler("Begin my adventure"),
                outputs=[msg],
                queue=False
            ).then(
                fn=respond,
                inputs=[msg, chatbot, genre, full_memory],
                outputs=[chatbot]
            )
            
            # Create example buttons for the current genre
            # (initially fantasy; relabeled when the genre dropdown changes).
            example_buttons = []
            for i, example in enumerate(get_examples_for_genre("fantasy")):
                btn = gr.Button(example, elem_id=f"example_{i}")
                btn.click(
                    fn=create_example_click_handler(example),
                    outputs=[msg],
                    queue=False
                ).then(
                    fn=respond,
                    inputs=[msg, chatbot, genre, full_memory],
                    outputs=[chatbot]
                )
                example_buttons.append(btn)
    
    # Function to update example buttons when genre changes.
    # NOTE(review): the click handlers above still carry the ORIGINAL example
    # text as their bound payload; only the labels are updated here. Also,
    # gr.Button.update is the Gradio 3.x API (removed in 4.x, where
    # gr.update(value=...) is used instead) — confirm the pinned version.
    def update_example_buttons(genre):
        examples = get_examples_for_genre(genre)
        return [gr.Button.update(value=example) for example in examples[:4]]  # Limit to 4 examples
    
    # Connect genre dropdown to update example buttons
    genre.change(
        fn=update_example_buttons,
        inputs=[genre],
        outputs=example_buttons
    )
    
    # Set up event handlers for the chatbot
    # (Enter in the textbox and the Continue button behave identically.)
    msg.submit(respond, [msg, chatbot, genre, full_memory], [chatbot])
    submit.click(respond, [msg, chatbot, genre, full_memory], [chatbot])
    
    # Clear the chatbot for a new adventure
    clear.click(lambda: [], None, chatbot, queue=False)
    clear.click(lambda: "", None, msg, queue=False)
    
    # Add a download story button
    # NOTE(review): story_output is created with visible=False and its
    # visibility is never toggled, so the saved markdown may never appear;
    # the js scroll hook below also targets elem_id 'story_output', which is
    # never assigned to the component — verify both in a running app.
    with gr.Row():
        save_btn = gr.Button("Save Story as Markdown", variant="secondary")
        story_output = gr.Markdown(visible=False)
    
    save_btn.click(save_story, inputs=[chatbot], outputs=[story_output])
    save_btn.click(lambda: True, None, story_output, js="() => {document.getElementById('story_output').scrollIntoView();}", queue=False)

if __name__ == "__main__":
    # Bind to all interfaces on the conventional Gradio/Spaces port.
    demo.launch(server_name="0.0.0.0", server_port=7860)