File size: 3,534 Bytes
52c4718
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
import gradio as gr
import os
from groq import Groq

# Initialize Groq client
# NOTE(review): if GROQ_API_KEY is unset, api_key is None and Groq(...)
# will presumably fail here or at first request — confirm desired failure mode.
api_key = os.getenv("GROQ_API_KEY")
client = Groq(api_key=api_key)

# Function to generate a storyboard using LLM
def generate_storyboard(scenario):
    """Ask the LLM for a six-scene storyboard describing *scenario*.

    Returns the model's text, or a fixed prompt string when the scenario
    is empty or whitespace-only (no API call is made in that case).
    """
    if not scenario.strip():
        return "Please provide a scenario to generate the storyboard."

    prompt = [
        {"role": "system", "content": "You are an AI storyteller. Generate a storyboard in a structured table with six scenes."},
        {"role": "user", "content": f"Generate a 6-scene storyboard for: {scenario}"},
    ]

    # Single non-streaming completion; the whole storyboard comes back at once.
    result = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=prompt,
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=False,
        stop=None,
    )
    return result.choices[0].message.content

# Interactive chatbot with streaming response
def chatbot_stream(user_input):
    """Stream an assistant reply to *user_input* for a messages-format Chatbot.

    Yields a growing chat value — a list of ``{"role", "content"}`` dicts —
    after each token chunk arrives, so the Chatbot renders incrementally.
    """
    messages = [
        {"role": "system", "content": "You are an expert in storyboarding. Answer questions interactively."},
        {"role": "user", "content": user_input}
    ]

    completion = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=messages,
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=True,
        stop=None,
    )

    response = ""
    for chunk in completion:
        # delta.content is None on some chunks (e.g. the final one).
        text_chunk = chunk.choices[0].delta.content or ""
        response += text_chunk
        # BUG FIX: gr.Chatbot(type="messages") expects a *list* of message
        # dicts as its value, not a single dict; include the user's turn so
        # it appears in the transcript as well.
        yield [
            {"role": "user", "content": user_input},
            {"role": "assistant", "content": response},
        ]

# Gradio UI with enhanced chat interface
def ui():
    """Build and launch the Gradio app.

    Two tabs: a streaming chat interface backed by ``chatbot_stream`` and a
    one-shot storyboard generator backed by ``generate_storyboard``.
    Blocks in ``app.launch()`` until the server stops.
    """
    with gr.Blocks(theme=gr.themes.Glass(primary_hue="violet", secondary_hue="emerald", neutral_hue="stone")) as app:
        with gr.Tabs():
            with gr.TabItem("💬Chat"):
                gr.Markdown("# AI Storyboard & Chatbot")
                chatbot = gr.Chatbot(label="Storyboard Chatbot", type="messages")
                with gr.Row():
                    chat_input = gr.Textbox(
                        label="Your Message",
                        placeholder="Type your question here...",
                        lines=1
                    )
                    send_button = gr.Button("✋Ask Question")

                # Quick question examples
                quick_questions = ["How do I structure a storyboard?", "What makes a good visual scene?", "How to add depth to a story?"]
                with gr.Row():
                    for question in quick_questions:
                        # BUG FIX: the original `lambda q=question: chatbot_stream(q)`
                        # *returned* a generator object from a plain function, so
                        # Gradio tried to use the generator itself as the output
                        # value instead of streaming it. A real generator function
                        # (with the question bound as a default arg to dodge
                        # late-binding closures) streams correctly.
                        def _stream_preset(q=question):
                            yield from chatbot_stream(q)

                        gr.Button(question, variant="secondary").click(
                            _stream_preset, inputs=None, outputs=chatbot
                        )

                # Chatbot functionality: stream the reply, then clear the box.
                send_button.click(
                    fn=chatbot_stream,
                    inputs=chat_input,
                    outputs=chatbot,
                    queue=True
                ).then(
                    fn=lambda: "",  # Clear the input box after sending
                    inputs=None,
                    outputs=chat_input
                )

            with gr.TabItem("📖 Storyboard Generator"):
                gr.Markdown("## Generate a Storyboard")
                scenario_input = gr.Textbox(label="Enter your scenario")
                generate_btn = gr.Button("Generate Storyboard")
                storyboard_output = gr.Textbox(label="Generated Storyboard", interactive=False)
                generate_btn.click(generate_storyboard, inputs=scenario_input, outputs=storyboard_output)

    app.launch()

ui()