import random

import gradio as gr
import numpy as np
import torch
from diffusers import DiffusionPipeline
import spaces

# Basic settings
dtype = torch.bfloat16
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the model
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell",
    torch_dtype=dtype
).to(device)

MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 2048

# Flowchart examples
EXAMPLES = [
    {
        "title": "Business Workflow",
        "prompt": """A hand-drawn style flowchart, vibrant colors, minimalistic icons.
BUSINESS WORKFLOW
├── START [Green Button ~40px]
│   ├── COLLECT REQUIREMENTS [Folder Icon]
│   └── ANALYZE DATA [Chart Icon]
├── IMPLEMENTATION [Coding Symbol ~50px]
│   ├── FRONTEND [Browser Icon]
│   └── BACKEND [Server Icon]
├── TEST & INTEGRATION [Gear Icon ~45px]
└── DEPLOY
    └── END [Checkered Flag ~40px]""",
        "width": 1024,
        "height": 1024
    },
    {
        "title": "Software Release Flow",
        "prompt": """A hand-drawn style flowchart, pastel colors, arrows between stages.
SOFTWARE RELEASE
├── FEATURE BRANCH [Git Branch Icon ~45px]
│   ├── DEVELOPMENT [Code Editor]
│   └── UNIT TEST [Check Mark]
├── MERGE TO MAIN [Pull Request Icon]
│   ├── CI/CD [Pipeline Icon ~40px]
│   └── BUILD [Gear Icon]
└── PRODUCTION
    └── DEPLOY [Cloud Upload Icon]""",
        "width": 1024,
        "height": 1024
    },
    {
        "title": "E-Commerce Checkout",
        "prompt": """A hand-drawn style flowchart, light watercolor, user journey from cart to payment.
E-COMMERCE CHECKOUT
├── CART [Shopping Cart ~40px]
│   ├── LOGIN [User Icon]
│   └── ADDRESS [Location Pin]
├── PAYMENT [Credit Card Icon ~45px]
│   ├── VALIDATION [Lock Icon]
│   └── CONFIRMATION [Receipt Icon]
└── ORDER COMPLETE
    └── THANK YOU [Smiley Icon]""",
        "width": 1024,
        "height": 1024
    },
    {
        "title": "Data Pipeline",
        "prompt": """A hand-drawn style flowchart, tech-focused, neon highlights, showing data flow.
DATA PIPELINE
├── INGESTION [Database Icon ~50px]
│   ├── STREAMING [Kafka Symbol]
│   └── BATCH [CSV/JSON Files]
├── TRANSFORMATION [Gear Icon ~45px]
│   ├── CLEANING [Brush Icon]
│   └── AGGREGATION [Bar Graph]
├── STORAGE [Cloud Icon ~50px]
└── ANALYTICS
    └── DASHBOARDS [Monitor Icon]""",
        "width": 1024,
        "height": 1024
    },
    {
        "title": "Machine Learning Lifecycle",
        "prompt": """A hand-drawn style flowchart, pastel palette, ML steps from data to deployment.
ML LIFECYCLE
├── DATA COLLECTION [Folder Icon ~45px]
│   ├── DATA CLEANING [Soap Icon]
│   └── FEATURE ENGINEERING [Puzzle Icon]
├── MODEL TRAINING [Robot Icon ~50px]
│   ├── HYPERPARAM TUNING [Dial Knob]
│   └── EVALUATION [Magnifier Icon]
├── DEPLOYMENT [Cloud Icon ~45px]
└── MONITORING
    └── FEEDBACK LOOP [Arrow Circle Icon]""",
        "width": 1024,
        "height": 1024
    }
]

# Convert examples to Gradio format (currently unused by the UI below;
# see the gr.Examples sketch at the end of the file)
GRADIO_EXAMPLES = [
    [example["prompt"], example["width"], example["height"]]
    for example in EXAMPLES
]


@spaces.GPU()
def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024,
          num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)
    image = pipe(
        prompt=prompt,
        width=width,
        height=height,
        num_inference_steps=num_inference_steps,
        generator=generator,
        guidance_scale=0.0  # keep focus on the flowchart text while allowing free-form rendering
    ).images[0]
    return image, seed


# CSS styles (original structure kept; only some names adjusted)
css = """
.container {
    display: flex;
    flex-direction: row;
    height: 100%;
}
.input-column {
    flex: 1;
    padding: 20px;
    border-right: 2px solid #eee;
    max-width: 800px;
}
.examples-column {
    flex: 1;
    padding: 20px;
    overflow-y: auto;
    background: #f7f7f7;
}
.title {
    text-align: center;
    color: #2a2a2a;
    padding: 20px;
    font-size: 2.5em;
    font-weight: bold;
    background: linear-gradient(90deg, #f0f0f0 0%, #ffffff 100%);
    border-bottom: 3px solid #ddd;
    margin-bottom: 30px;
}
.subtitle {
    text-align: center;
    color: #666;
    margin-bottom: 30px;
}
.input-box {
    background: white;
    padding: 20px;
    border-radius: 10px;
    box-shadow: 0 2px 10px rgba(0,0,0,0.1);
    margin-bottom: 20px;
    width: 100%;
}
.input-box textarea {
    width: 100% !important;
    min-width: 600px !important;
    font-size: 14px !important;
    line-height: 1.5 !important;
    padding: 12px !important;
}
.example-card {
    background: white;
    padding: 15px;
    margin: 10px 0;
    border-radius: 8px;
    box-shadow: 0 2px 5px rgba(0,0,0,0.05);
}
.example-title {
    font-weight: bold;
    color: #2a2a2a;
    margin-bottom: 10px;
}
.contain {
    max-width: 1400px !important;
    margin: 0 auto !important;
}
.input-area {
    flex: 2 !important;
}
.examples-area {
    flex: 1 !important;
}
"""
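
# --- Optional smoke test (a minimal sketch, not part of the app) ---
# Commented-out illustration of calling `infer` directly, e.g. when debugging the
# pipeline locally without the Gradio UI. The output filename "flowchart_test.png"
# is an arbitrary choice for this sketch, not something the app itself uses.
#
# def _smoke_test():
#     image, used_seed = infer(
#         EXAMPLES[0]["prompt"],
#         seed=0,
#         randomize_seed=True,
#         width=512,
#         height=512,
#         num_inference_steps=4,
#     )
#     image.save("flowchart_test.png")
#     print(f"Saved test image (seed={used_seed})")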

# Gradio interface
with gr.Blocks(css=css) as demo:
    gr.Markdown(
        """
        <div class="title">GINI Flowchart</div>
        <div class="subtitle">Create professional process flowcharts using FLUX AI</div>
        """
    )
""") with gr.Row(equal_height=True): # 왼쪽 입력 컬럼 with gr.Column(elem_id="input-column", scale=2): with gr.Group(elem_classes="input-box"): prompt = gr.Text( label="Flowchart Prompt", placeholder="Enter your process flowchart structure...", lines=10, elem_classes="prompt-input" ) run_button = gr.Button("Generate Flowchart", variant="primary") result = gr.Image(label="Generated Flowchart") with gr.Accordion("Advanced Settings", open=False): seed = gr.Slider( label="Seed", minimum=0, maximum=MAX_SEED, step=1, value=0, ) randomize_seed = gr.Checkbox(label="Randomize seed", value=True) with gr.Row(): width = gr.Slider( label="Width", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=1024, ) height = gr.Slider( label="Height", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=1024, ) num_inference_steps = gr.Slider( label="Number of inference steps", minimum=1, maximum=50, step=1, value=4, ) # 오른쪽 예제 컬럼 with gr.Column(elem_id="examples-column", scale=1): gr.Markdown("### Example Flowcharts") for example in EXAMPLES: with gr.Group(elem_classes="example-card"): gr.Markdown(f"#### {example['title']}") gr.Markdown(f"```\n{example['prompt']}\n```") def create_example_handler(ex): def handler(): return { prompt: ex["prompt"], width: ex["width"], height: ex["height"] } return handler gr.Button("Use This Example", size="sm").click( fn=create_example_handler(example), outputs=[prompt, width, height] ) # 이벤트 바인딩 (버튼 클릭 & 텍스트박스 엔터) gr.on( triggers=[run_button.click, prompt.submit], fn=infer, inputs=[prompt, seed, randomize_seed, width, height, num_inference_steps], outputs=[result, seed] ) if __name__ == "__main__": demo.queue() demo.launch( server_name="0.0.0.0", server_port=7860, share=False, show_error=True, debug=True )