import gradio as gr
import torch
from transformers import pipeline as translation_pipeline
from diffusers import DiffusionPipeline

# Korean -> English translator used to normalize prompts before image generation.
translator = translation_pipeline(
    "translation",
    model="Helsinki-NLP/opus-mt-ko-en",
    device="cpu",
)

# FLUX.1-schnell text-to-image pipeline, loaded for CPU inference in float32.
model_id = "black-forest-labs/FLUX.1-schnell"
pipe = DiffusionPipeline.from_pretrained(
    model_id,
    torch_dtype=torch.float32,
).to("cpu")
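# Note: FLUX.1-schnell is the timestep-distilled FLUX.1 variant, aimed at very few
# inference steps. Loading it in float32 on CPU keeps the demo GPU-free, but image
# generation will be much slower than GPU inference.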

def translate_prompt_if_korean(prompt_text: str) -> str:
    """Translate the prompt to English if it contains Hangul; otherwise pass it through."""
    # Check for characters in the Hangul Syllables block (U+AC00 '가' to U+D7A3 '힣').
    if any("가" <= ch <= "힣" for ch in prompt_text):
        result = translator(prompt_text)
        return result[0]["translation_text"]
    return prompt_text
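
# Example (illustrative): translate_prompt_if_korean("탐정이 어두운 골목에 들어간다")
# returns an English sentence such as "A detective enters a dark alley"; the exact
# wording depends on the Opus-MT model. English input is returned unchanged.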


def generate_storyboard(
    prompt,
    width=768,
    height=512,
    num_inference_steps=10,
    guidance_scale=7.5,
    seed=42,
):
    # Translate Korean prompts so the diffusion model always receives English text.
    prompt_en = translate_prompt_if_korean(prompt)

    # Seeded generator for reproducible outputs.
    generator = torch.Generator(device="cpu").manual_seed(seed)

    # CPU autocast runs eligible ops in bfloat16 by default.
    with torch.autocast("cpu"):
        result = pipe(
            prompt=prompt_en,
            width=width,
            height=height,
            num_inference_steps=num_inference_steps,
            guidance_scale=guidance_scale,
            generator=generator,
        ).images[0]
    return result
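
# Note: FLUX.1-schnell is distilled for roughly 1-4 inference steps, so lowering
# num_inference_steps from the UI default of 10 can cut CPU runtime substantially.
#
# Example direct call (illustrative values, bypassing the Gradio UI):
#     image = generate_storyboard("A detective enters a dark alley",
#                                 num_inference_steps=4, seed=0)
#     image.save("storyboard.png")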


custom_css = """
#title {
    text-align: center;
    font-size: 3em;
    font-weight: bold;
    margin: 20px 0;
    color: #333;
}
#subtitle {
    text-align: center;
    color: #666;
    margin-bottom: 30px;
    font-size: 1.2em;
}
.gradio-container {
    background: linear-gradient(120deg, #f8f8f8 0%, #ffffff 100%);
}
.input-panel, .output-panel {
    background: white;
    border-radius: 12px;
    padding: 20px;
    box-shadow: 0 2px 10px rgba(0,0,0,0.1);
}
#prompt-input {
    font-size: 14px !important;
    min-height: 140px !important;
}
.advanced-settings {
    font-size: 0.9em;
    color: #444;
}
.example-box {
    background: #f9f9f9;
    padding: 10px;
    margin-top: 10px;
    border-radius: 8px;
}
"""
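# The CSS selectors above are wired to components below via elem_id / elem_classes
# (e.g. elem_id="prompt-input" matches the #prompt-input rule).
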
with gr.Blocks(css=custom_css) as demo:
    gr.Markdown("<div id='title'>Gini Storyboard</div>")
    gr.Markdown("<div id='subtitle'>Generate a hand-drawn style storyboard in black & white film noir or any style you wish!</div>")

    with gr.Row():
        # Left column: prompt and generation settings.
        with gr.Column(elem_classes="input-panel", scale=1):
            prompt = gr.Textbox(
                label="Storyboard Prompt",
                placeholder="Enter your scene descriptions here (in English or Korean)",
                lines=8,
                elem_id="prompt-input",
            )
            seed = gr.Slider(
                label="Seed",
                value=42,
                minimum=0,
                maximum=999999,
                step=1,
            )
            with gr.Row():
                width = gr.Slider(
                    label="Width",
                    minimum=256,
                    maximum=1280,
                    value=768,
                    step=64,
                )
                height = gr.Slider(
                    label="Height",
                    minimum=256,
                    maximum=1280,
                    value=512,
                    step=64,
                )
            with gr.Accordion("Advanced Settings", open=False, elem_classes="advanced-settings"):
                num_inference_steps = gr.Slider(
                    label="Number of inference steps",
                    value=10,
                    minimum=1,
                    maximum=50,
                    step=1,
                )
                guidance_scale = gr.Slider(
                    label="Guidance Scale",
                    value=7.5,
                    minimum=0.0,
                    maximum=20.0,
                    step=0.5,
                )

            run_button = gr.Button("Generate Storyboard", variant="primary")

        # Right column: generated storyboard image.
        with gr.Column(elem_classes="output-panel", scale=1):
            result = gr.Image(label="Storyboard Result")

    gr.Markdown("### Example Prompt")
    # gr.Group replaces gr.Box here, since gr.Box was removed in Gradio 4.
    with gr.Group(elem_classes="example-box"):
        example_text = (
            "A hand-drawn storyboard style, film noir theme, black and white.\n"
            "SCENE 1: A detective enters a dark alley [Frame 1]\n"
            "SCENE 2: He notices a shadow [Frame 2]\n"
            "SCENE 3: A sudden flash of light reveals a clue [Frame 3]"
        )
        gr.Markdown(f"```\n{example_text}\n```")
        example_button = gr.Button("Use Example")

    def load_example():
        return example_text

    example_button.click(fn=load_example, outputs=[prompt])

    # Generate on button click or when the prompt textbox is submitted.
    run_button.click(
        fn=generate_storyboard,
        inputs=[prompt, width, height, num_inference_steps, guidance_scale, seed],
        outputs=[result],
    )
    prompt.submit(
        fn=generate_storyboard,
        inputs=[prompt, width, height, num_inference_steps, guidance_scale, seed],
        outputs=[result],
    )
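
# To run locally (assumption: dependencies installed with something like
# `pip install gradio torch transformers diffusers sentencepiece`), save this
# script and launch it, e.g. `python app.py` (the file name is an assumption),
# then open http://localhost:7860 in a browser.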


if __name__ == "__main__":
    demo.queue()
    demo.launch(server_name="0.0.0.0", server_port=7860, share=False)