import os

import gradio as gr
from groq import Groq

# Initialize the Groq client from the GROQ_API_KEY environment variable
api_key = os.getenv("GROQ_API_KEY")
client = Groq(api_key=api_key)
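# Optional sketch, not part of the original app (assumption: failing fast at
# startup is preferable to an authentication error on the first request):
if not api_key:
    raise RuntimeError("GROQ_API_KEY is not set; export it before launching the app.")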
# Conversation history shared across turns
conversation_history = []

# Chat with the model; streamed chunks are accumulated into a single reply
def chat_with_bot_stream(user_input, temperature, max_tokens, top_p):
    global conversation_history
    conversation_history.append({"role": "user", "content": user_input})

    # On the first turn, prepend the system prompt
    if len(conversation_history) == 1:
        conversation_history.insert(0, {
            "role": "system",
            "content": "You are an expert in storyboarding. Provide structured and insightful responses to queries about creating and refining storyboards."
        })

    completion = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=conversation_history,
        temperature=temperature,
        max_tokens=max_tokens,
        top_p=top_p,
        stream=True,
        stop=None,
    )

    # Accumulate the streamed chunks into the full response
    response_content = ""
    for chunk in completion:
        response_content += chunk.choices[0].delta.content or ""

    conversation_history.append({"role": "assistant", "content": response_content})

    # Return the history as (user, assistant) tuples for gr.Chatbot,
    # skipping the system prompt so it is not rendered as an empty exchange
    return [
        (msg["content"] if msg["role"] == "user" else None,
         msg["content"] if msg["role"] == "assistant" else None)
        for msg in conversation_history
        if msg["role"] != "system"
    ]
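# Illustrative sketch only (hypothetical name, not wired into the UI below).
# chat_with_bot_stream above buffers the streamed chunks and returns the full
# history in one step; a generator version like this could instead yield the
# partial history as each chunk arrives, so Gradio renders the reply
# incrementally.
def chat_with_bot_stream_incremental(user_input, temperature, max_tokens, top_p):
    global conversation_history
    conversation_history.append({"role": "user", "content": user_input})
    completion = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=conversation_history,
        temperature=temperature,
        max_tokens=max_tokens,
        top_p=top_p,
        stream=True,
    )
    partial = ""
    for chunk in completion:
        partial += chunk.choices[0].delta.content or ""
        # Yield the completed turns plus the growing assistant reply
        yield [
            (msg["content"] if msg["role"] == "user" else None,
             msg["content"] if msg["role"] == "assistant" else None)
            for msg in conversation_history
            if msg["role"] != "system"
        ] + [(None, partial)]
    conversation_history.append({"role": "assistant", "content": partial})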
# Generate a six-scene storyboard for a given scenario
def generate_storyboard(scenario):
    if not scenario.strip():
        return "Please provide a scenario to generate the storyboard."

    messages = [
        {"role": "system", "content": """You are an AI storyteller. Generate a storyboard in a structured table with six scenes. For each scene, provide:
1) Scenario text describing the problem a persona is trying to resolve and the product or feature they are using.
2) Storyline text, descriptive visual information, and the purpose of the scene.
You must provide the output in a structured format such as a table.
"""},
        {"role": "user", "content": f"Generate a 6-scene storyboard for: {scenario}"}
    ]

    completion = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=messages,
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=False,
        stop=None,
    )
    return completion.choices[0].message.content
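# Hypothetical smoke test (assumption: STORYBOARD_SMOKE_TEST is an environment
# flag you set yourself). It verifies the Groq call and prompt from the command
# line without launching the UI and never runs during normal startup.
if os.getenv("STORYBOARD_SMOKE_TEST"):
    print(generate_storyboard("A commuter tries a carpooling app to cut travel costs"))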
TITLE = """ | |
<style> | |
h1 { text-align: center; font-size: 24px; margin-bottom: 10px; } | |
</style> | |
<h1>๐ Storyboard Assistant</h1> | |
""" | |
with gr.Blocks(theme=gr.themes.Glass(primary_hue="violet", secondary_hue="violet", neutral_hue="stone")) as demo:
    with gr.Tabs():
        with gr.TabItem("Chat"):
            gr.HTML(TITLE)
            chatbot = gr.Chatbot(label="Storyboard Chatbot")

            with gr.Row():
                user_input = gr.Textbox(
                    label="Your Message",
                    placeholder="Type your question here...",
                    lines=1
                )
                send_button = gr.Button("Ask Question")

            # Example questions
            gr.Markdown("### Example Questions:")
            with gr.Row():
                example_q1 = gr.Button("How do I structure a storyboard?")
                example_q2 = gr.Button("What are the key elements of a storyboard?")
                example_q3 = gr.Button("Can you generate an example storyboard?")
                example_q4 = gr.Button("How does storyboarding help in UX design?")

            # Parameters for model control
            gr.Markdown("### Model Parameters:")
            with gr.Row():
                temperature = gr.Slider(0.0, 2.0, value=1.0, label="Temperature", step=0.1)
                max_tokens = gr.Slider(256, 2048, value=1024, label="Max Tokens", step=256)
                top_p = gr.Slider(0.0, 1.0, value=1.0, label="Top P", step=0.1)
            # Chatbot functionality: send the message, then clear the textbox
            send_button.click(
                fn=chat_with_bot_stream,
                inputs=[user_input, temperature, max_tokens, top_p],
                outputs=chatbot,
                queue=True
            ).then(
                fn=lambda: "",
                inputs=None,
                outputs=user_input
            )

            # Example question functionality; the sliders are passed as inputs so the
            # current parameter values are used rather than their initial defaults
            example_q1.click(
                fn=lambda t, m, p: chat_with_bot_stream("How do I structure a storyboard?", t, m, p),
                inputs=[temperature, max_tokens, top_p],
                outputs=chatbot
            )
            example_q2.click(
                fn=lambda t, m, p: chat_with_bot_stream("What are the key elements of a storyboard?", t, m, p),
                inputs=[temperature, max_tokens, top_p],
                outputs=chatbot
            )
            example_q3.click(
                fn=lambda t, m, p: chat_with_bot_stream("Can you generate an example storyboard?", t, m, p),
                inputs=[temperature, max_tokens, top_p],
                outputs=chatbot
            )
            example_q4.click(
                fn=lambda t, m, p: chat_with_bot_stream("How does storyboarding help in UX design?", t, m, p),
                inputs=[temperature, max_tokens, top_p],
                outputs=chatbot
            )
        with gr.TabItem("Generate Storyboard"):
            gr.Markdown("## Generate a Storyboard")
            scenario_input = gr.Textbox(label="Enter your scenario")
            generate_btn = gr.Button("Generate Storyboard")
            storyboard_output = gr.Textbox(label="Generated Storyboard", interactive=False)

            generate_btn.click(generate_storyboard, inputs=scenario_input, outputs=storyboard_output)

demo.launch()
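# To run locally (assumptions: the script is saved as app.py and the gradio and
# groq packages are installed):
#   export GROQ_API_KEY="your-key"
#   python app.py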