import gradio as gr
import os
from groq import Groq
# Initialize the Groq API client from the GROQ_API_KEY environment variable.
# NOTE(review): no validation that the variable is set — a missing key only
# surfaces as an error on the first API request; consider failing fast here.
api_key = os.getenv("GROQ_API_KEY")
client = Groq(api_key=api_key)
# Module-level chat history shared by every call to chat_with_bot_stream().
# NOTE(review): this is global, so all users/sessions of the app share one
# conversation — confirm that is intended for this deployment.
conversation_history = []
def chat_with_bot_stream(user_input):
    """Send *user_input* to the Groq chat model and return the chat history.

    The history is returned as a list of ``[user, assistant]`` pairs in the
    format expected by ``gr.Chatbot``. Despite the name, the streamed chunks
    are accumulated and returned once (the UI is not updated incrementally).

    Fixes over the previous version:
    - the system prompt is no longer emitted as an empty ``(None, None)`` row;
    - each exchange is one ``(user, assistant)`` pair instead of two
      half-empty rows.
    """
    global conversation_history
    # Seed the system prompt before the first user message so the model is
    # primed as a storyboarding expert (equivalent to the old insert-at-0).
    if not conversation_history:
        conversation_history.append({
            "role": "system",
            "content": "You are an expert in storyboarding. Provide structured and insightful responses to queries about creating and refining storyboards."
        })
    conversation_history.append({"role": "user", "content": user_input})

    completion = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=conversation_history,
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=True,
        stop=None,
    )

    # Accumulate streamed deltas; a chunk's content may be None at the end.
    response_content = ""
    for chunk in completion:
        response_content += chunk.choices[0].delta.content or ""
    conversation_history.append({"role": "assistant", "content": response_content})

    # Build (user, assistant) display pairs, skipping the system prompt.
    pairs = []
    for msg in conversation_history:
        if msg["role"] == "user":
            pairs.append([msg["content"], None])
        elif msg["role"] == "assistant":
            if pairs and pairs[-1][1] is None:
                pairs[-1][1] = msg["content"]
            else:
                # Defensive: assistant reply without a preceding user turn.
                pairs.append([None, msg["content"]])
    return pairs
def generate_storyboard(scenario, temperature, top_p):
    """Generate a six-scene storyboard for *scenario* via the Groq API.

    Returns the model's text, or a prompt-the-user message when *scenario*
    is empty or whitespace-only (no API call is made in that case).
    """
    # Guard clause: nothing to generate for a blank scenario.
    if not scenario.strip():
        return "Please provide a scenario to generate the storyboard."

    system_msg = {
        "role": "system",
        "content": "You are an AI storyteller. Generate a storyboard in a structured table with six scenes.",
    }
    user_msg = {
        "role": "user",
        "content": f"Generate a 6-scene storyboard for: {scenario}",
    }

    # Single non-streaming request; temperature/top_p come from the UI sliders.
    result = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=[system_msg, user_msg],
        temperature=temperature,
        top_p=top_p,
        max_tokens=1024,
        stream=False,
        stop=None,
    )
    return result.choices[0].message.content
# Heading HTML injected at the top of the Chat tab.
# NOTE(review): "๐" looks like a mojibake'd emoji (UTF-8 decoded with the
# wrong codepage) — confirm the intended character and re-save as UTF-8.
# Left byte-for-byte intact because it is a runtime string.
TITLE = """
๐ Storyboard Assistant
"""
# Preset scenarios offered by the radio group on the Generate Storyboard tab.
example_scenarios = [
    "A futuristic cityscape under AI governance.",
    "A detective solving a mystery in a cyberpunk world.",
    "A young explorer discovering an ancient civilization.",
    "A spaceship crew encountering an unknown planet.",
    "A medieval knight navigating political intrigue."
]
# Sampling-control sliders. Both are constructed here at module level and
# placed into the layout later via .render() inside the Customize accordion.
# Range 0..1, default 1, in steps of 0.01 (min/max passed positionally).
temperature_component = gr.Slider(
    0, 1,
    value=1,
    step=0.01,
    info="Controls randomness. Lower values make responses more deterministic.",
    label="Temperature",
)

top_p_component = gr.Slider(
    0, 1,
    value=1,
    step=0.01,
    info="Limits token selection to tokens with a cumulative probability up to P.",
    label="Top-P Sampling",
)
# --- UI definition ---------------------------------------------------------
# Two-tab Gradio app: a storyboard chat assistant and a one-shot generator.
# NOTE(review): several labels below ("๐ฌChat", "โAsk Question", "๐ ๏ธ", …)
# contain mojibake — presumably emoji mangled by an encoding round-trip.
# They are runtime strings, so they are preserved byte-for-byte here;
# confirm the intended characters and re-save the file as UTF-8.
with gr.Blocks(theme=gr.themes.Glass(primary_hue="violet", secondary_hue="emerald", neutral_hue="stone")) as demo:
    with gr.Tabs():
        # Tab 1: free-form chat backed by chat_with_bot_stream().
        with gr.TabItem("๐ฌChat"):
            gr.HTML(TITLE)
            chatbot = gr.Chatbot(label="Storyboard Chatbot")
            with gr.Row():
                user_input = gr.Textbox(
                    label="Your Message",
                    placeholder="Type your question here...",
                    lines=1
                )
                send_button = gr.Button("โAsk Question")
            # Send the message, then clear the input box once the reply lands.
            send_button.click(
                fn=chat_with_bot_stream,
                inputs=user_input,
                outputs=chatbot,
                queue=True
            ).then(
                fn=lambda: "",
                inputs=None,
                outputs=user_input
            )
        # Tab 2: single-shot storyboard generation with sampling controls.
        with gr.TabItem("๐ Generate Storyboard"):
            gr.Markdown("## Generate a Storyboard")
            scenario_input = gr.Textbox(label="Enter your scenario")
            example_radio = gr.Radio(
                choices=example_scenarios,
                label="Example Scenarios",
                info="Select an example scenario or enter your own."
            )
            generate_btn = gr.Button("Generate Storyboard")
            storyboard_output = gr.Textbox(label="Generated Storyboard", interactive=False)
            # The sliders were constructed at module level; render() places
            # them into the layout at this point.
            with gr.Accordion("๐ ๏ธ Customize", open=False):
                temperature_component.render()
                top_p_component.render()
            generate_btn.click(
                generate_storyboard,
                inputs=[scenario_input, temperature_component, top_p_component],
                outputs=storyboard_output
            )
            # Copy the selected example into the scenario textbox.
            # NOTE(review): the fallback string is written INTO the textbox
            # (it would then be submitted as a literal scenario) — confirm
            # that is the intended behavior.
            example_radio.change(
                fn=lambda scenario: scenario if scenario else "No scenario selected.",
                inputs=[example_radio],
                outputs=[scenario_input]
            )
# Launch the app. Runs on import as well — there is no __main__ guard.
demo.launch()