import gradio as gr
import os
from groq import Groq

# Initialize Groq client
api_key = os.getenv("GROQ_API_KEY")
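# Optional guard (an assumption about deployment): fail fast with a clear message
# when the key is missing; drop this if the key is supplied some other way.
if not api_key:
    raise RuntimeError("GROQ_API_KEY environment variable is not set.")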
client = Groq(api_key=api_key)

# Initialize conversation history
conversation_history = []

def chat_with_bot_stream(user_input):
    """Append the user's message, query the model, and return the updated chat history."""
    global conversation_history
    conversation_history.append({"role": "user", "content": user_input})
    
    if len(conversation_history) == 1:
        conversation_history.insert(0, {
            "role": "system",
            "content": "You are an expert in storyboarding. Provide structured and insightful responses to queries about creating and refining storyboards."
        })
    
    completion = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=conversation_history,
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=True,
        stop=None,
    )
    
    # Accumulate the streamed chunks into a single response string
    response_content = ""
    for chunk in completion:
        response_content += chunk.choices[0].delta.content or ""
    
    conversation_history.append({"role": "assistant", "content": response_content})
    
    # Build (user, assistant) rows for the Chatbot component, skipping the system prompt
    return [(msg["content"] if msg["role"] == "user" else None,
             msg["content"] if msg["role"] == "assistant" else None)
            for msg in conversation_history
            if msg["role"] != "system"]

# Function to generate a storyboard
def generate_storyboard(scenario, temperature, top_p):
    if not scenario.strip():
        return "Please provide a scenario to generate the storyboard."
    
    messages = [
        {"role": "system", "content": "You are an AI storyteller. Generate a storyboard in a structured table with six scenes."},
        {"role": "user", "content": f"Generate a 6-scene storyboard for: {scenario}"}
    ]
    
    completion = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=messages,
        temperature=temperature,
        top_p=top_p,
        max_tokens=1024,
        stream=False,
        stop=None,
    )
    return completion.choices[0].message.content

TITLE = """
<style>
h1 { text-align: center; font-size: 24px; margin-bottom: 10px; }
</style>
<h1>📖 Storyboard Assistant</h1>
"""

example_scenarios = [
    "A futuristic cityscape under AI governance.",
    "A detective solving a mystery in a cyberpunk world.",
    "A young explorer discovering an ancient civilization.",
    "A spaceship crew encountering an unknown planet.",
    "A medieval knight navigating political intrigue."
]

temperature_component = gr.Slider(
    minimum=0,
    maximum=1,
    value=1,
    step=0.01,
    label="Temperature",
    info="Controls randomness. Lower values make responses more deterministic."
)

top_p_component = gr.Slider(
    minimum=0,
    maximum=1,
    value=1,
    step=0.01,
    label="Top-P Sampling",
    info="Limits token selection to tokens with a cumulative probability up to P."
)

with gr.Blocks(theme=gr.themes.Glass(primary_hue="violet", secondary_hue="emerald", neutral_hue="stone")) as demo:
    with gr.Tabs():
        with gr.TabItem("💬 Chat"):
            gr.HTML(TITLE)
            chatbot = gr.Chatbot(label="Storyboard Chatbot")
            with gr.Row():
                user_input = gr.Textbox(
                    label="Your Message",
                    placeholder="Type your question here...",
                    lines=1
                )
                send_button = gr.Button("✋ Ask Question")
            
            # Chatbot functionality
            send_button.click(
                fn=chat_with_bot_stream,
                inputs=user_input,
                outputs=chatbot,
                queue=True
            ).then(
                fn=lambda: "",
                inputs=None,
                outputs=user_input
            )
        
        with gr.TabItem("📖 Generate Storyboard"):
            gr.Markdown("## Generate a Storyboard")
            scenario_input = gr.Textbox(label="Enter your scenario")
            example_radio = gr.Radio(
                choices=example_scenarios,
                label="Example Scenarios",
                info="Select an example scenario or enter your own."
            )
            generate_btn = gr.Button("Generate Storyboard")
            storyboard_output = gr.Textbox(label="Generated Storyboard", interactive=False)
            
            with gr.Accordion("🛠️ Customize", open=False):
                temperature_component.render()
                top_p_component.render()
            
            generate_btn.click(
                generate_storyboard, 
                inputs=[scenario_input, temperature_component, top_p_component], 
                outputs=storyboard_output
            )
            
            example_radio.change(
                fn=lambda scenario: scenario if scenario else "No scenario selected.",
                inputs=[example_radio],
                outputs=[scenario_input]
            )

demo.launch()