import os
import gradio as gr
import random
import time
import logging
import google.generativeai as genai
import torch
import numpy as np
from diffusers import DiffusionPipeline
from transformers import pipeline as hf_pipeline
##############################################################################
# 1) ZeroGPU environment handling + device/dtype setup
##############################################################################
# Try to initialize ZeroGPU
try:
import zerogpu
zerogpu.init()
device = "cuda" if torch.cuda.is_available() else "cpu"
except ImportError:
    # ZeroGPU package is not installed
if os.getenv("ZERO_GPU"):
print("ZeroGPU environment variable is set but zerogpu package is not installed.")
device = "cpu"
else:
device = "cuda" if torch.cuda.is_available() else "cpu"
except Exception as e:
print(f"Error initializing ZeroGPU: {e}")
device = "cpu"
# GPU์ผ ๋•Œ๋งŒ bfloat16, ๊ทธ ์™ธ์—๋Š” float32
dtype = torch.bfloat16 if device == "cuda" else torch.float32
print(f"Using device: {device}, dtype: {dtype}")
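# Note (assumption, not part of the original logic): some older GPUs cannot run
# bfloat16 kernels. If that turns out to be a problem, a guarded fallback along
# these lines could replace the unconditional choice above:
#
#     if device == "cuda" and not torch.cuda.is_bf16_supported():
#         dtype = torch.float16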
##############################################################################
# 2) ๋ชจ๋ธ ๋กœ๋“œ: ๋ฒˆ์—ญ ๋ชจ๋ธ, DiffusionPipeline
##############################################################################
try:
translator = hf_pipeline(
"translation",
model="Helsinki-NLP/opus-mt-ko-en",
device=0 if device == "cuda" else -1
)
pipe = DiffusionPipeline.from_pretrained(
"black-forest-labs/FLUX.1-schnell",
torch_dtype=dtype
).to(device)
print("Models loaded successfully")
except Exception as e:
print(f"Error loading models: {e}")
# ๋ชจ๋ธ ๋กœ๋“œ ์—๋Ÿฌ ์ฒ˜๋ฆฌ๋ฅผ ์œ„ํ•œ ๋”๋ฏธ ํ•จ์ˆ˜๋“ค
def dummy_translator(text):
return [{'translation_text': text}]
class DummyPipe:
def __call__(self, **kwargs):
from PIL import Image
import numpy as np
dummy_img = Image.fromarray(np.zeros((512, 512, 3), dtype=np.uint8))
class DummyResult:
def __init__(self, img):
self.images = [img]
return DummyResult(dummy_img)
translator = dummy_translator
pipe = DummyPipe()
MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 2048
##############################################################################
# Korean language detection helper
##############################################################################
def contains_korean(text):
for char in text:
if ord('๊ฐ€') <= ord(char) <= ord('ํžฃ'):
return True
return False
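# For example, contains_korean("์Šค๋งˆํŠธํฐ") is True and contains_korean("smartphone")
# is False; only precomposed Hangul syllables (U+AC00..U+D7A3) are checked.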
##############################################################################
# Image generation function
##############################################################################
def generate_design_image(prompt, seed=42, randomize_seed=True, width=1024, height=1024, num_inference_steps=4):
"""
์ƒ์„ฑ๋œ ํ™•์žฅ ์•„์ด๋””์–ด ํ…์ŠคํŠธ(prompt)๋ฅผ ์ž…๋ ฅ๋ฐ›์•„,
ํ•„์š”์‹œ ํ•œ๊ตญ์–ด๋ฅผ ์˜์–ด๋กœ ๋ฒˆ์—ญํ•œ ํ›„ DiffusionPipeline์œผ๋กœ ์ด๋ฏธ์ง€๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค.
"""
original_prompt = prompt
translated = False
    # Translate to English if the prompt contains Korean
if contains_korean(prompt):
translation = translator(prompt)
prompt = translation[0]['translation_text']
translated = True
# ๋žœ๋ค ์‹œ๋“œ ์„ค์ •
if randomize_seed:
seed = random.randint(0, MAX_SEED)
generator = torch.Generator(device=device).manual_seed(seed)
image = pipe(
prompt=prompt,
width=width,
height=height,
num_inference_steps=num_inference_steps,
generator=generator,
guidance_scale=0.0
).images[0]
return image
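# Minimal usage sketch (illustrative only; the parameter values are the defaults above):
#
#     img = generate_design_image("์ ‘ํžˆ๋Š” ๋””์Šคํ”Œ๋ ˆ์ด ์Šค๋งˆํŠธํฐ", randomize_seed=False)
#     img.save("design.png")  # the pipeline returns a PIL image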
##############################################################################
# Logging configuration
##############################################################################
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
handlers=[
logging.FileHandler("api_debug.log"),
logging.StreamHandler()
]
)
logger = logging.getLogger("idea_generator")
##############################################################################
# Gemini API key
##############################################################################
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=GEMINI_API_KEY)
##############################################################################
# ์„ ํƒ์  ๋ณ€ํ˜• ์„ ํƒ ํ•จ์ˆ˜
##############################################################################
def choose_alternative(transformation):
if "/" not in transformation:
return transformation
parts = transformation.split("/")
if len(parts) != 2:
return random.choice([part.strip() for part in parts])
left = parts[0].strip()
right = parts[1].strip()
if " " in left:
tokens = left.split(" ", 1)
prefix = tokens[0]
if not right.startswith(prefix):
option1 = left
option2 = prefix + " " + right
else:
option1 = left
option2 = right
return random.choice([option1, option2])
else:
return random.choice([left, right])
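# Example of the behaviour above: "๋ถ€ํ”ผ ๋Š˜์–ด๋‚จ/์ค„์–ด๋“ฆ" resolves to either
# "๋ถ€ํ”ผ ๋Š˜์–ด๋‚จ" or "๋ถ€ํ”ผ ์ค„์–ด๋“ฆ", i.e. the word before the space on the left-hand
# side is carried over to the right-hand alternative before one option is chosen at random.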
##############################################################################
# Dictionary of physical transformation categories (15 in total)
##############################################################################
physical_transformation_categories = {
"๊ณต๊ฐ„ ์ด๋™": [
"์•ž/๋’ค ์ด๋™", "์ขŒ/์šฐ ์ด๋™", "์œ„/์•„๋ž˜ ์ด๋™", "์„ธ๋กœ์ถ• ํšŒ์ „(๊ณ ๊ฐœ ๋„๋•์ž„)",
"๊ฐ€๋กœ์ถ• ํšŒ์ „(๊ณ ๊ฐœ ์ “๊ธฐ)", "๊ธธ์ด์ถ• ํšŒ์ „(์˜†์œผ๋กœ ๊ธฐ์šธ์ž„)", "์› ์šด๋™", "๋‚˜์„ ํ˜• ์ด๋™",
"๊ด€์„ฑ์— ์˜ํ•œ ๋ฏธ๋„๋Ÿฌ์ง", "ํšŒ์ „์ถ• ๋ณ€ํ™”", "๋ถˆ๊ทœ์น™ ํšŒ์ „", "ํ”๋“ค๋ฆผ ์šด๋™", "ํฌ๋ฌผ์„  ์ด๋™",
"๋ฌด์ค‘๋ ฅ ๋ถ€์œ ", "์ˆ˜๋ฉด ์œ„ ๋ถ€์œ ", "์ ํ”„/๋„์•ฝ", "์Šฌ๋ผ์ด๋”ฉ", "๋กค๋ง", "์ž์œ  ๋‚™ํ•˜",
"์™•๋ณต ์šด๋™", "ํƒ„์„ฑ ํŠ•๊น€", "๊ด€ํ†ต", "ํšŒํ”ผ ์›€์ง์ž„", "์ง€๊ทธ์žฌ๊ทธ ์ด๋™", "์Šค์œ™ ์šด๋™"
],
"ํฌ๊ธฐ์™€ ํ˜•ํƒœ ๋ณ€ํ™”": [
"๋ถ€ํ”ผ ๋Š˜์–ด๋‚จ/์ค„์–ด๋“ฆ", "๊ธธ์ด ๋Š˜์–ด๋‚จ/์ค„์–ด๋“ฆ", "๋„ˆ๋น„ ๋Š˜์–ด๋‚จ/์ค„์–ด๋“ฆ", "๋†’์ด ๋Š˜์–ด๋‚จ/์ค„์–ด๋“ฆ",
"๋ฐ€๋„ ๋ณ€ํ™”", "๋ฌด๊ฒŒ ์ฆ๊ฐ€/๊ฐ์†Œ", "๋ชจ์–‘ ๋ณ€ํ˜•", "์ƒํƒœ ๋ณ€ํ™”", "๋ถˆ๊ท ๋“ฑ ๋ณ€ํ˜•",
"๋ณต์žกํ•œ ํ˜•ํƒœ ๋ณ€ํ˜•", "๋น„ํ‹€๋ฆผ/๊ผฌ์ž„", "๋ถˆ๊ท ์ผํ•œ ํ™•์žฅ/์ถ•์†Œ", "๋ชจ์„œ๋ฆฌ ๋‘ฅ๊ธ€๊ฒŒ/๋‚ ์นด๋กญ๊ฒŒ",
"๊นจ์ง/๊ฐˆ๋ผ์ง", "์—ฌ๋Ÿฌ ์กฐ๊ฐ ๋‚˜๋ˆ ์ง", "๋ฌผ ์ €ํ•ญ", "๋จผ์ง€ ์ €ํ•ญ", "์ฐŒ๊ทธ๋Ÿฌ์ง/๋ณต์›",
"์ ‘ํž˜/ํŽผ์ณ์ง", "์••์ฐฉ/ํŒฝ์ฐฝ", "๋Š˜์–ด๋‚จ/์ˆ˜์ถ•", "๊ตฌ๊ฒจ์ง/ํ‰ํ‰ํ•ด์ง", "๋ญ‰๊ฐœ์ง/๋‹จ๋‹จํ•ด์ง",
"๋ง๋ฆผ/ํŽด์ง", "๊บพ์ž„/๊ตฌ๋ถ€๋Ÿฌ์ง"
],
"ํ‘œ๋ฉด ๋ฐ ์™ธ๊ด€ ๋ณ€ํ™”": [
"์ƒ‰์ƒ ๋ณ€ํ™”", "์งˆ๊ฐ ๋ณ€ํ™”", "ํˆฌ๋ช…/๋ถˆํˆฌ๋ช… ๋ณ€ํ™”", "๋ฐ˜์ง์ž„/๋ฌด๊ด‘ ๋ณ€ํ™”",
"๋น› ๋ฐ˜์‚ฌ ์ •๋„ ๋ณ€ํ™”", "๋ฌด๋Šฌ ๋ณ€ํ™”", "๊ฐ๋„์— ๋”ฐ๋ฅธ ์ƒ‰์ƒ ๋ณ€ํ™”", "๋น›์— ๋”ฐ๋ฅธ ์ƒ‰์ƒ ๋ณ€ํ™”",
"์˜จ๋„์— ๋”ฐ๋ฅธ ์ƒ‰์ƒ ๋ณ€ํ™”", "ํ™€๋กœ๊ทธ๋žจ ํšจ๊ณผ", "ํ‘œ๋ฉด ๊ฐ๋„๋ณ„ ๋น› ๋ฐ˜์‚ฌ", "ํ‘œ๋ฉด ๋ชจ์–‘ ๋ณ€ํ˜•",
"์ดˆ๋ฏธ์„ธ ํ‘œ๋ฉด ๊ตฌ์กฐ ๋ณ€ํ™”", "์ž๊ฐ€ ์„ธ์ • ํšจ๊ณผ", "์–ผ๋ฃฉ/ํŒจํ„ด ์ƒ์„ฑ", "ํ๋ฆผ/์„ ๋ช…ํ•จ ๋ณ€ํ™”",
"๊ด‘ํƒ/์œค๊ธฐ ๋ณ€ํ™”", "์ƒ‰์กฐ/์ฑ„๋„ ๋ณ€ํ™”", "๋ฐœ๊ด‘/ํ˜•๊ด‘", "๋น› ์‚ฐ๋ž€ ํšจ๊ณผ",
"๋น› ํก์ˆ˜ ๋ณ€ํ™”", "๋ฐ˜ํˆฌ๋ช… ํšจ๊ณผ", "๊ทธ๋ฆผ์ž ํšจ๊ณผ ๋ณ€ํ™”", "์ž์™ธ์„  ๋ฐ˜์‘ ๋ณ€ํ™”",
"์•ผ๊ด‘ ํšจ๊ณผ"
],
"๋ฌผ์งˆ์˜ ์ƒํƒœ ๋ณ€ํ™”": [
"๊ณ ์ฒด/์•ก์ฒด/๊ธฐ์ฒด ์ „ํ™˜", "๊ฒฐ์ •ํ™”/์šฉํ•ด", "์‚ฐํ™”/๋ถ€์‹", "๋”ฑ๋”ฑํ•ด์ง/๋ถ€๋“œ๋Ÿฌ์›Œ์ง",
"ํŠน์ˆ˜ ์ƒํƒœ ์ „ํ™˜", "๋ฌด์ •ํ˜•/๊ฒฐ์ •ํ˜• ์ „ํ™˜", "์„ฑ๋ถ„ ๋ถ„๋ฆฌ", "๋ฏธ์„ธ ์ž…์ž ํ˜•์„ฑ/๋ถ„ํ•ด",
"์ ค ํ˜•์„ฑ/ํ’€์–ด์ง", "์ค€์•ˆ์ • ์ƒํƒœ ๋ณ€ํ™”", "๋ถ„์ž ์ž๊ฐ€ ์ •๋ ฌ/๋ถ„ํ•ด", "์ƒํƒœ๋ณ€ํ™” ์ง€์—ฐ ํ˜„์ƒ",
"๋…น์Œ", "๊ตณ์Œ", "์ฆ๋ฐœ/์‘์ถ•", "์Šนํ™”/์ฆ์ฐฉ", "์นจ์ „/๋ถ€์œ ", "๋ถ„์‚ฐ/์‘์ง‘",
"๊ฑด์กฐ/์Šต์œค", "ํŒฝ์œค/์ˆ˜์ถ•", "๋™๊ฒฐ/ํ•ด๋™", "ํ’ํ™”/์นจ์‹", "์ถฉ์ „/๋ฐฉ์ „",
"๊ฒฐํ•ฉ/๋ถ„๋ฆฌ", "๋ฐœํšจ/๋ถ€ํŒจ"
],
"์—ด ๊ด€๋ จ ๋ณ€ํ™”": [
"์˜จ๋„ ์ƒ์Šน/ํ•˜๊ฐ•", "์—ด์— ์˜ํ•œ ํŒฝ์ฐฝ/์ˆ˜์ถ•", "์—ด ์ „๋‹ฌ/์ฐจ๋‹จ", "์••๋ ฅ ์ƒ์Šน/ํ•˜๊ฐ•",
"์—ด ๋ณ€ํ™”์— ๋”ฐ๋ฅธ ์žํ™”", "๋ฌด์งˆ์„œ๋„ ๋ณ€ํ™”", "์—ด์ „๊ธฐ ํ˜„์ƒ", "์ž๊ธฐ์žฅ์— ์˜ํ•œ ์—ด ๋ณ€ํ™”",
"์ƒํƒœ๋ณ€ํ™” ์ค‘ ์—ด ์ €์žฅ/๋ฐฉ์ถœ", "์—ด ์ŠคํŠธ๋ ˆ์Šค ๋ฐœ์ƒ/ํ•ด์†Œ", "๊ธ‰๊ฒฉํ•œ ์˜จ๋„ ๋ณ€ํ™” ์˜ํ–ฅ",
"๋ณต์‚ฌ์—ด์— ์˜ํ•œ ๋ƒ‰๊ฐ/๊ฐ€์—ด", "๋ฐœ์—ด/ํก์—ด", "์—ด ๋ถ„ํฌ ๋ณ€ํ™”", "์—ด ๋ฐ˜์‚ฌ/ํก์ˆ˜",
"๋ƒ‰๊ฐ ์‘์ถ•", "์—ด ํ™œ์„ฑํ™”", "์—ด ๋ณ€์ƒ‰", "์—ด ํŒฝ์ฐฝ ๊ณ„์ˆ˜ ๋ณ€ํ™”", "์—ด ์•ˆ์ •์„ฑ ๋ณ€ํ™”",
"๋‚ด์—ด์„ฑ/๋‚ดํ•œ์„ฑ", "์ž๊ธฐ๋ฐœ์—ด", "์—ด์  ํ‰ํ˜•/๋ถˆ๊ท ํ˜•", "์—ด์  ๋ณ€ํ˜•", "์—ด ๋ถ„์‚ฐ/์ง‘์ค‘"
],
"์›€์ง์ž„ ํŠน์„ฑ ๋ณ€ํ™”": [
"๊ฐ€์†/๊ฐ์†", "์ผ์ • ์†๋„ ์œ ์ง€", "์ง„๋™/์ง„๋™ ๊ฐ์†Œ", "๋ถ€๋”ชํž˜/ํŠ•๊น€",
"ํšŒ์ „ ์†๋„ ์ฆ๊ฐ€/๊ฐ์†Œ", "ํšŒ์ „ ๋ฐฉํ–ฅ ๋ณ€ํ™”", "๋ถˆ๊ทœ์น™ ์›€์ง์ž„", "๋ฉˆ์ท„๋‹ค ๋ฏธ๋„๋Ÿฌ์ง€๋Š” ํ˜„์ƒ",
"๊ณต์ง„/๋ฐ˜๊ณต์ง„", "์œ ์ฒด ์† ์ €ํ•ญ/์–‘๋ ฅ ๋ณ€ํ™”", "์›€์ง์ž„ ์ €ํ•ญ ๋ณ€ํ™”", "๋ณตํ•ฉ ์ง„๋™ ์›€์ง์ž„",
"ํŠน์ˆ˜ ์œ ์ฒด ์† ์›€์ง์ž„", "ํšŒ์ „-์ด๋™ ์—ฐ๊ณ„ ์›€์ง์ž„", "๊ด€์„ฑ ์ •์ง€", "์ถฉ๊ฒฉ ํก์ˆ˜",
"์ถฉ๊ฒฉ ์ „๋‹ฌ", "์šด๋™๋Ÿ‰ ๋ณด์กด", "๋งˆ์ฐฐ๋ ฅ ๋ณ€ํ™”", "๊ด€์„ฑ ํƒˆ์ถœ", "๋ถˆ์•ˆ์ • ๊ท ํ˜•",
"๋™์  ์•ˆ์ •์„ฑ", "ํ”๋“ค๋ฆผ ๊ฐ์‡ ", "๊ฒฝ๋กœ ์˜ˆ์ธก์„ฑ", "ํšŒํ”ผ ์›€์ง์ž„"
],
"๊ตฌ์กฐ์  ๋ณ€ํ™”": [
"๋ถ€ํ’ˆ ์ถ”๊ฐ€/์ œ๊ฑฐ", "์กฐ๋ฆฝ/๋ถ„ํ•ด", "์ ‘๊ธฐ/ํŽด๊ธฐ", "๋ณ€ํ˜•/์›์ƒ๋ณต๊ตฌ", "์ตœ์  ๊ตฌ์กฐ ๋ณ€ํ™”",
"์ž๊ฐ€ ์žฌ๋ฐฐ์—ด", "์ž์—ฐ ํŒจํ„ด ํ˜•์„ฑ/์†Œ๋ฉธ", "๊ทœ์น™์  ํŒจํ„ด ๋ณ€ํ™”", "๋ชจ๋“ˆ์‹ ๋ณ€ํ˜•",
"๋ณต์žก์„ฑ ์ฆ๊ฐ€ ๊ตฌ์กฐ", "์›๋ž˜ ๋ชจ์–‘ ๊ธฐ์–ต ํšจ๊ณผ", "์‹œ๊ฐ„์— ๋”ฐ๋ฅธ ํ˜•ํƒœ ๋ณ€ํ™”", "๋ถ€๋ถ„ ์ œ๊ฑฐ",
"๋ถ€๋ถ„ ๊ต์ฒด", "๊ฒฐํ•ฉ", "๋ถ„๋ฆฌ", "๋ถ„ํ• /ํ†ตํ•ฉ", "์ค‘์ฒฉ/๊ฒน์นจ", "๋‚ด๋ถ€ ๊ตฌ์กฐ ๋ณ€ํ™”",
"์™ธ๋ถ€ ๊ตฌ์กฐ ๋ณ€ํ™”", "์ค‘์‹ฌ์ถ• ์ด๋™", "๊ท ํ˜•์  ๋ณ€ํ™”", "๊ณ„์ธต ๊ตฌ์กฐ ๋ณ€ํ™”", "์ง€์ง€ ๊ตฌ์กฐ ๋ณ€ํ™”",
"์‘๋ ฅ ๋ถ„์‚ฐ ๊ตฌ์กฐ", "์ถฉ๊ฒฉ ํก์ˆ˜ ๊ตฌ์กฐ", "๊ทธ๋ฆฌ๋“œ/๋งคํŠธ๋ฆญ์Šค ๊ตฌ์กฐ ๋ณ€ํ™”", "์ƒํ˜ธ ์—ฐ๊ฒฐ์„ฑ ๋ณ€ํ™”"
],
"์ „๊ธฐ ๋ฐ ์ž๊ธฐ ๋ณ€ํ™”": [
"์ž์„ฑ ์ƒ์„ฑ/์†Œ๋ฉธ", "์ „ํ•˜๋Ÿ‰ ์ฆ๊ฐ€/๊ฐ์†Œ", "์ „๊ธฐ์žฅ ์ƒ์„ฑ/์†Œ๋ฉธ", "์ž๊ธฐ์žฅ ์ƒ์„ฑ/์†Œ๋ฉธ",
"์ดˆ์ „๋„ ์ƒํƒœ ์ „ํ™˜", "๊ฐ•์œ ์ „์ฒด ํŠน์„ฑ ๋ณ€ํ™”", "์–‘์ž ์ƒํƒœ ๋ณ€ํ™”", "ํ”Œ๋ผ์ฆˆ๋งˆ ์ƒํƒœ ํ˜•์„ฑ/์†Œ๋ฉธ",
"์Šคํ•€ํŒŒ ์ „๋‹ฌ", "๋น›์— ์˜ํ•œ ์ „๊ธฐ ๋ฐœ์ƒ", "์••๋ ฅ์— ์˜ํ•œ ์ „๊ธฐ ๋ฐœ์ƒ", "์ž๊ธฐ์žฅ ์† ์ „๋ฅ˜ ๋ณ€ํ™”",
"์ „๊ธฐ ์ €ํ•ญ ๋ณ€ํ™”", "์ „๊ธฐ ์ „๋„์„ฑ ๋ณ€ํ™”", "์ •์ „๊ธฐ ๋ฐœ์ƒ/๋ฐฉ์ „", "์ „์ž๊ธฐ ์œ ๋„",
"์ „์ž๊ธฐํŒŒ ๋ฐฉ์ถœ/ํก์ˆ˜", "์ „๊ธฐ ์šฉ๋Ÿ‰ ๋ณ€ํ™”", "์ž๊ธฐ ์ด๋ ฅ ํ˜„์ƒ", "์ „๊ธฐ์  ๋ถ„๊ทน",
"์ „์ž ํ๋ฆ„ ๋ฐฉํ–ฅ ๋ณ€ํ™”", "์ „๊ธฐ์  ๊ณต๋ช…", "์ „๊ธฐ์  ์ฐจํ/๋…ธ์ถœ", "์ž๊ธฐ ์ฐจํ/๋…ธ์ถœ",
"์ž๊ธฐ์žฅ ๋ฐฉํ–ฅ ์ •๋ ฌ"
],
"ํ™”ํ•™์  ๋ณ€ํ™”": [
"ํ‘œ๋ฉด ์ฝ”ํŒ… ๋ณ€ํ™”", "๋ฌผ์งˆ ์„ฑ๋ถ„ ๋ณ€ํ™”", "ํ™”ํ•™ ๋ฐ˜์‘ ๋ณ€ํ™”", "์ด‰๋งค ์ž‘์šฉ ์‹œ์ž‘/์ค‘๋‹จ",
"๋น›์— ์˜ํ•œ ํ™”ํ•™ ๋ฐ˜์‘", "์ „๊ธฐ์— ์˜ํ•œ ํ™”ํ•™ ๋ฐ˜์‘", "๋‹จ๋ถ„์ž๋ง‰ ํ˜•์„ฑ", "๋ถ„์ž ์ˆ˜์ค€ ๊ณ„์‚ฐ ๋ณ€ํ™”",
"์ž์—ฐ ๋ชจ๋ฐฉ ํ‘œ๋ฉด ๋ณ€ํ™”", "ํ™˜๊ฒฝ ๋ฐ˜์‘ํ˜• ๋ฌผ์งˆ ๋ณ€ํ™”", "์ฃผ๊ธฐ์  ํ™”ํ•™ ๋ฐ˜์‘", "์‚ฐํ™”", "ํ™˜์›",
"๊ณ ๋ถ„์žํ™”", "๋ฌผ ๋ถ„ํ•ด", "ํ™”ํ•ฉ", "๋ฐฉ์‚ฌ์„  ์˜ํ–ฅ", "์‚ฐ-์—ผ๊ธฐ ๋ฐ˜์‘", "์ค‘ํ™” ๋ฐ˜์‘",
"์ด์˜จํ™”", "ํ™”ํ•™์  ํก์ฐฉ/ํƒˆ์ฐฉ", "์ด‰๋งค ํšจ์œจ ๋ณ€ํ™”", "ํšจ์†Œ ํ™œ์„ฑ ๋ณ€ํ™”", "๋ฐœ์ƒ‰ ๋ฐ˜์‘",
"pH ๋ณ€ํ™”", "ํ™”ํ•™์  ํ‰ํ˜• ์ด๋™", "๊ฒฐํ•ฉ ํ˜•์„ฑ/๋ถ„ํ•ด", "์šฉํ•ด๋„ ๋ณ€ํ™”"
],
"์‹œ๊ฐ„ ๊ด€๋ จ ๋ณ€ํ™”": [
"๋…ธํ™”/ํ’ํ™”", "๋งˆ๋ชจ/๋ถ€์‹", "์ƒ‰ ๋ฐ”๋žจ/๋ณ€์ƒ‰", "์†์ƒ/ํšŒ๋ณต", "์ˆ˜๋ช… ์ฃผ๊ธฐ ๋ณ€ํ™”",
"์‚ฌ์šฉ์ž ์ƒํ˜ธ์ž‘์šฉ์— ๋”ฐ๋ฅธ ์ ์‘", "ํ•™์Šต ๊ธฐ๋ฐ˜ ํ˜•ํƒœ ์ตœ์ ํ™”", "์‹œ๊ฐ„์— ๋”ฐ๋ฅธ ๋ฌผ์„ฑ ๋ณ€ํ™”",
"์ง‘๋‹จ ๊ธฐ์–ต ํšจ๊ณผ", "๋ฌธํ™”์  ์˜๋ฏธ ๋ณ€ํ™”", "์ง€์—ฐ ๋ฐ˜์‘", "์ด์ „ ์ƒํƒœ ์˜์กด ๋ณ€ํ™”",
"์ ์ง„์  ์‹œ๊ฐ„ ๋ณ€ํ™”", "์ง„ํ™”์  ๋ณ€ํ™”", "์ฃผ๊ธฐ์  ์žฌ์ƒ", "๊ณ„์ ˆ ๋ณ€ํ™” ์ ์‘",
"์ƒ์ฒด๋ฆฌ๋“ฌ ๋ณ€ํ™”", "์ƒ์•  ์ฃผ๊ธฐ ๋‹จ๊ณ„", "์„ฑ์žฅ/ํ‡ดํ™”", "์ž๊ธฐ ๋ณต๊ตฌ/์žฌ์ƒ",
"์ž์—ฐ ์ˆœํ™˜ ์ ์‘", "์ง€์†์„ฑ/์ผ์‹œ์„ฑ", "๊ธฐ์–ต ํšจ๊ณผ", "์ง€์—ฐ๋œ ์ž‘์šฉ", "๋ˆ„์  ํšจ๊ณผ"
],
"๋น›๊ณผ ์‹œ๊ฐ ํšจ๊ณผ": [
"๋ฐœ๊ด‘/์†Œ๋“ฑ", "๋น› ํˆฌ๊ณผ/์ฐจ๋‹จ", "๋น› ์‚ฐ๋ž€/์ง‘์ค‘", "์ƒ‰์ƒ ์ŠคํŽ™ํŠธ๋Ÿผ ๋ณ€ํ™”", "๋น› ํšŒ์ ˆ",
"๋น› ๊ฐ„์„ญ", "ํ™€๋กœ๊ทธ๋žจ ์ƒ์„ฑ", "๋ ˆ์ด์ € ํšจ๊ณผ", "๋น› ํŽธ๊ด‘", "ํ˜•๊ด‘/์ธ๊ด‘",
"์ž์™ธ์„ /์ ์™ธ์„  ๋ฐœ๊ด‘", "๊ด‘ํ•™์  ์ฐฉ์‹œ", "๋น› ๊ตด์ ˆ", "๊ทธ๋ฆผ์ž ์ƒ์„ฑ/์ œ๊ฑฐ",
"์ƒ‰์ˆ˜์ฐจ ํšจ๊ณผ", "๋ฌด์ง€๊ฐœ ํšจ๊ณผ", "๊ธ€๋กœ์šฐ ํšจ๊ณผ", "ํ”Œ๋ž˜์‹œ ํšจ๊ณผ", "์กฐ๋ช… ํŒจํ„ด",
"๋น” ํšจ๊ณผ", "๊ด‘ ํ•„ํ„ฐ ํšจ๊ณผ", "๋น›์˜ ๋ฐฉํ–ฅ์„ฑ ๋ณ€ํ™”", "ํˆฌ์˜ ํšจ๊ณผ", "๋น› ๊ฐ์ง€/๋ฐ˜์‘",
"๊ด‘๋„ ๋ณ€ํ™”"
],
"์†Œ๋ฆฌ์™€ ์ง„๋™ ํšจ๊ณผ": [
"์†Œ๋ฆฌ ๋ฐœ์ƒ/์†Œ๋ฉธ", "์†Œ๋ฆฌ ๋†’๋‚ฎ์ด ๋ณ€ํ™”", "์†Œ๋ฆฌ ํฌ๊ธฐ ๋ณ€ํ™”", "์Œ์ƒ‰ ๋ณ€ํ™”",
"๊ณต๋ช…/๋ฐ˜๊ณต๋ช…", "์Œํ–ฅ ์ง„๋™", "์ดˆ์ŒํŒŒ/์ €์ŒํŒŒ ๋ฐœ์ƒ", "์Œํ–ฅ ์ง‘์ค‘/๋ถ„์‚ฐ",
"์Œํ–ฅ ๋ฐ˜์‚ฌ/ํก์ˆ˜", "์Œํ–ฅ ๋„ํ”Œ๋Ÿฌ ํšจ๊ณผ", "์ŒํŒŒ ๊ฐ„์„ญ", "์Œํ–ฅ ๊ณต์ง„",
"์ง„๋™ ํŒจํ„ด ๋ณ€ํ™”", "ํƒ€์•… ํšจ๊ณผ", "์Œํ–ฅ ํ”ผ๋“œ๋ฐฑ", "์Œํ–ฅ ์ฐจํ/์ฆํญ",
"์†Œ๋ฆฌ ์ง€ํ–ฅ์„ฑ", "์Œํ–ฅ ์™œ๊ณก", "๋น„ํŠธ ์ƒ์„ฑ", "ํ•˜๋ชจ๋‹‰์Šค ์ƒ์„ฑ", "์ฃผํŒŒ์ˆ˜ ๋ณ€์กฐ",
"์Œํ–ฅ ์ถฉ๊ฒฉํŒŒ", "์Œํ–ฅ ํ•„ํ„ฐ๋ง"
],
"์ƒ๋ฌผํ•™์  ๋ณ€ํ™”": [
"์ƒ์žฅ/์œ„์ถ•", "์„ธํฌ ๋ถ„์—ด/์‚ฌ๋ฉธ", "์ƒ๋ฌผ ๋ฐœ๊ด‘", "์‹ ์ง„๋Œ€์‚ฌ ๋ณ€ํ™”", "๋ฉด์—ญ ๋ฐ˜์‘",
"ํ˜ธ๋ฅด๋ชฌ ๋ถ„๋น„", "์‹ ๊ฒฝ ๋ฐ˜์‘", "์œ ์ „์  ๋ฐœํ˜„", "์ ์‘/์ง„ํ™”", "์ƒ์ฒด๋ฆฌ๋“ฌ ๋ณ€ํ™”",
"์žฌ์ƒ/์น˜์œ ", "๋…ธํ™”/์„ฑ์ˆ™", "์ƒ์ฒด ๋ชจ๋ฐฉ ๋ณ€ํ™”", "๋ฐ”์ด์˜คํ•„๋ฆ„ ํ˜•์„ฑ", "์ƒ๋ฌผํ•™์  ๋ถ„ํ•ด",
"ํšจ์†Œ ํ™œ์„ฑํ™”/๋น„ํ™œ์„ฑํ™”", "์ƒ๋ฌผํ•™์  ์‹ ํ˜ธ ์ „๋‹ฌ", "์ŠคํŠธ๋ ˆ์Šค ๋ฐ˜์‘", "์ฒด์˜จ ์กฐ์ ˆ",
"์ƒ๋ฌผํ•™์  ์‹œ๊ณ„ ๋ณ€ํ™”", "์„ธํฌ์™ธ ๊ธฐ์งˆ ๋ณ€ํ™”", "์ƒ์ฒด ์—ญํ•™์  ๋ฐ˜์‘", "์„ธํฌ ์šด๋™์„ฑ",
"์„ธํฌ ๊ทน์„ฑ ๋ณ€ํ™”", "์˜์–‘ ์ƒํƒœ ๋ณ€ํ™”"
],
"ํ™˜๊ฒฝ ์ƒํ˜ธ์ž‘์šฉ": [
"์˜จ๋„ ๋ฐ˜์‘", "์Šต๋„ ๋ฐ˜์‘", "๊ธฐ์•• ๋ฐ˜์‘", "์ค‘๋ ฅ ๋ฐ˜์‘", "์ž๊ธฐ์žฅ ๋ฐ˜์‘",
"๋น› ๋ฐ˜์‘", "์†Œ๋ฆฌ ๋ฐ˜์‘", "ํ™”ํ•™ ๋ฌผ์งˆ ๊ฐ์ง€", "๊ธฐ๊ณ„์  ์ž๊ทน ๊ฐ์ง€", "์ „๊ธฐ ์ž๊ทน ๋ฐ˜์‘",
"๋ฐฉ์‚ฌ์„  ๋ฐ˜์‘", "์ง„๋™ ๊ฐ์ง€", "pH ๋ฐ˜์‘", "์šฉ๋งค ๋ฐ˜์‘", "๊ธฐ์ฒด ๊ตํ™˜",
"ํ™˜๊ฒฝ ์˜ค์—ผ ๋ฐ˜์‘", "๋‚ ์”จ ๋ฐ˜์‘", "๊ณ„์ ˆ ๋ณ€ํ™” ๋ฐ˜์‘", "์ผ์ฃผ๊ธฐ ๋ฐ˜์‘", "์ƒํƒœ๊ณ„ ์ƒํ˜ธ์ž‘์šฉ",
"๊ณต์ƒ/๊ฒฝ์Ÿ ๋ฐ˜์‘", "ํฌ์‹/ํ”ผ์‹ ๊ด€๊ณ„", "๊ตฐ์ง‘ ํ˜•์„ฑ", "์˜์—ญ ์„ค์ •", "์ด์ฃผ/์ •์ฐฉ ํŒจํ„ด"
],
"์„ผ์„œ ๊ธฐ๋Šฅ": [
"์‹œ๊ฐ ์„ผ์„œ/๊ฐ์ง€", "์ฒญ๊ฐ ์„ผ์„œ/๊ฐ์ง€", "์ด‰๊ฐ ์„ผ์„œ/๊ฐ์ง€", "๋ฏธ๊ฐ ์„ผ์„œ/๊ฐ์ง€", "ํ›„๊ฐ ์„ผ์„œ/๊ฐ์ง€",
"์˜จ๋„ ์„ผ์„œ/๊ฐ์ง€", "์Šต๋„ ์„ผ์„œ/๊ฐ์ง€", "์••๋ ฅ ์„ผ์„œ/๊ฐ์ง€", "๊ฐ€์†๋„ ์„ผ์„œ/๊ฐ์ง€", "ํšŒ์ „ ์„ผ์„œ/๊ฐ์ง€",
"๊ทผ์ ‘ ์„ผ์„œ/๊ฐ์ง€", "์œ„์น˜ ์„ผ์„œ/๊ฐ์ง€", "์šด๋™ ์„ผ์„œ/๊ฐ์ง€", "๊ฐ€์Šค ์„ผ์„œ/๊ฐ์ง€", "์ ์™ธ์„  ์„ผ์„œ/๊ฐ์ง€",
"์ž์™ธ์„  ์„ผ์„œ/๊ฐ์ง€", "๋ฐฉ์‚ฌ์„  ์„ผ์„œ/๊ฐ์ง€", "์ž๊ธฐ์žฅ ์„ผ์„œ/๊ฐ์ง€", "์ „๊ธฐ์žฅ ์„ผ์„œ/๊ฐ์ง€", "ํ™”ํ•™๋ฌผ์งˆ ์„ผ์„œ/๊ฐ์ง€",
"์ƒ์ฒด์‹ ํ˜ธ ์„ผ์„œ/๊ฐ์ง€", "์ง„๋™ ์„ผ์„œ/๊ฐ์ง€", "์†Œ์Œ ์„ผ์„œ/๊ฐ์ง€", "๋น› ์„ธ๊ธฐ ์„ผ์„œ/๊ฐ์ง€", "๋น› ํŒŒ์žฅ ์„ผ์„œ/๊ฐ์ง€",
"๊ธฐ์šธ๊ธฐ ์„ผ์„œ/๊ฐ์ง€", "pH ์„ผ์„œ/๊ฐ์ง€", "์ „๋ฅ˜ ์„ผ์„œ/๊ฐ์ง€", "์ „์•• ์„ผ์„œ/๊ฐ์ง€", "์ด๋ฏธ์ง€ ์„ผ์„œ/๊ฐ์ง€",
"๊ฑฐ๋ฆฌ ์„ผ์„œ/๊ฐ์ง€", "๊นŠ์ด ์„ผ์„œ/๊ฐ์ง€", "์ค‘๋ ฅ ์„ผ์„œ/๊ฐ์ง€", "์†๋„ ์„ผ์„œ/๊ฐ์ง€", "ํ๋ฆ„ ์„ผ์„œ/๊ฐ์ง€",
"์ˆ˜์œ„ ์„ผ์„œ/๊ฐ์ง€", "ํƒ๋„ ์„ผ์„œ/๊ฐ์ง€", "์—ผ๋„ ์„ผ์„œ/๊ฐ์ง€", "๊ธˆ์† ๊ฐ์ง€", "์••์ „ ์„ผ์„œ/๊ฐ์ง€",
"๊ด‘์ „ ์„ผ์„œ/๊ฐ์ง€", "์—ด์ „๋Œ€ ์„ผ์„œ/๊ฐ์ง€", "ํ™€ ํšจ๊ณผ ์„ผ์„œ/๊ฐ์ง€", "์ดˆ์ŒํŒŒ ์„ผ์„œ/๊ฐ์ง€", "๋ ˆ์ด๋” ์„ผ์„œ/๊ฐ์ง€",
"๋ผ์ด๋‹ค ์„ผ์„œ/๊ฐ์ง€", "ํ„ฐ์น˜ ์„ผ์„œ/๊ฐ์ง€", "์ œ์Šค์ฒ˜ ์„ผ์„œ/๊ฐ์ง€", "์‹ฌ๋ฐ• ์„ผ์„œ/๊ฐ์ง€", "ํ˜ˆ์•• ์„ผ์„œ/๊ฐ์ง€"
]
}
##############################################################################
# Gemini API call helper
##############################################################################
def query_gemini_api(prompt):
try:
model = genai.GenerativeModel('gemini-2.0-flash-thinking-exp-01-21')
response = model.generate_content(prompt)
try:
if hasattr(response, 'text'):
return response.text
if hasattr(response, 'candidates') and response.candidates:
candidate = response.candidates[0]
if hasattr(candidate, 'content'):
content = candidate.content
if hasattr(content, 'parts') and content.parts:
if len(content.parts) > 0:
return content.parts[0].text
if hasattr(response, 'parts') and response.parts:
if len(response.parts) > 0:
return response.parts[0].text
            return "Unable to generate a response. The API response structure differs from what was expected."
except Exception as inner_e:
logger.error(f"Error processing response: {inner_e}")
return f"An error occurred while processing the response: {str(inner_e)}"
except Exception as e:
logger.error(f"Error calling Gemini API: {e}")
if "API key not valid" in str(e):
return "API key is not valid. Please check your GEMINI_API_KEY environment variable."
return f"An error occurred while calling the API: {str(e)}"
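# Illustrative call (requires a valid GEMINI_API_KEY in the environment):
#
#     print(query_gemini_api("Briefly describe one folding-drone design idea."))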
##############################################################################
# Description expansion function (via the LLM)
##############################################################################
def enhance_with_llm(base_description, obj_name, category):
prompt = f"""
๋‹ค์Œ์€ '{obj_name}'์˜ '{category}' ๊ด€๋ จ ๊ฐ„๋‹จํ•œ ์„ค๋ช…์ž…๋‹ˆ๋‹ค:
"{base_description}"
์œ„ ๋‚ด์šฉ์„ ๋ณด๋‹ค ๊ตฌ์ฒดํ™”ํ•˜์—ฌ,
1) ์ฐฝ์˜์ ์ธ ๋ชจ๋ธ/์ปจ์…‰/ํ˜•์ƒ์˜ ๋ณ€ํ™”์— ๋Œ€ํ•œ ์ดํ•ด,
2) ํ˜์‹  ํฌ์ธํŠธ์™€ ๊ธฐ๋Šฅ์„ฑ ๋“ฑ์„ ์ค‘์‹ฌ์œผ๋กœ
3~4๋ฌธ์žฅ์˜ ์•„์ด๋””์–ด๋กœ ํ™•์žฅํ•ด ์ฃผ์„ธ์š”.
"""
return query_gemini_api(prompt)
##############################################################################
# ๊ฐ ๊ฐ์ฒด์ˆ˜(1, 2, 3)์— ๋”ฐ๋ฅธ ๋ณ€ํ˜• ์•„์ด๋””์–ด ์ƒ์„ฑ
##############################################################################
def generate_single_object_transformation_for_category(obj, selected_category):
transformations = physical_transformation_categories.get(selected_category)
if not transformations:
return {}
transformation = choose_alternative(random.choice(transformations))
base_description = f"{obj}์ด(๊ฐ€) {transformation} ํ˜„์ƒ์„ ๋ณด์ธ๋‹ค"
return {selected_category: {"base": base_description, "enhanced": None}}
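# These per-object generators all return a dict of the same shape, e.g.
# {"๊ณต๊ฐ„ ์ด๋™": {"base": "๋“œ๋ก ์ด(๊ฐ€) ๋‚˜์„ ํ˜• ์ด๋™ ํ˜„์ƒ์„ ๋ณด์ธ๋‹ค", "enhanced": None}};
# the "enhanced" field is filled in later by enhance_descriptions().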
def generate_two_objects_interaction_for_category(obj1, obj2, selected_category):
transformations = physical_transformation_categories.get(selected_category)
if not transformations:
return {}
transformation = choose_alternative(random.choice(transformations))
template = random.choice([
"{obj1}์ด(๊ฐ€) {obj2}์— ๊ฒฐํ•ฉํ•˜์—ฌ {change}๊ฐ€ ๋ฐœ์ƒํ–ˆ๋‹ค",
"{obj1}๊ณผ(์™€) {obj2}์ด(๊ฐ€) ์ถฉ๋Œํ•˜๋ฉด์„œ {change}๊ฐ€ ์ผ์–ด๋‚ฌ๋‹ค"
])
base_description = template.format(obj1=obj1, obj2=obj2, change=transformation)
return {selected_category: {"base": base_description, "enhanced": None}}
def generate_three_objects_interaction_for_category(obj1, obj2, obj3, selected_category):
transformations = physical_transformation_categories.get(selected_category)
if not transformations:
return {}
transformation = choose_alternative(random.choice(transformations))
template = random.choice([
"{obj1}, {obj2}, {obj3}์ด(๊ฐ€) ์‚ผ๊ฐํ˜• ๊ตฌ์กฐ๋กœ ๊ฒฐํ•ฉํ•˜์—ฌ {change}๊ฐ€ ๋ฐœ์ƒํ–ˆ๋‹ค",
"{obj1}์ด(๊ฐ€) {obj2}์™€(๊ณผ) {obj3} ์‚ฌ์ด์—์„œ ๋งค๊ฐœ์ฒด ์—ญํ• ์„ ํ•˜๋ฉฐ {change}๋ฅผ ์ด‰์ง„ํ–ˆ๋‹ค"
])
base_description = template.format(obj1=obj1, obj2=obj2, obj3=obj3, change=transformation)
return {selected_category: {"base": base_description, "enhanced": None}}
##############################################################################
# ์ƒ์„ฑ๋œ ๊ธฐ๋ณธ ์„ค๋ช…์„ LLM์„ ํ†ตํ•ด ํ™•์žฅ (๊ฐ ์นดํ…Œ๊ณ ๋ฆฌ๋ณ„)
##############################################################################
def enhance_descriptions(results, objects):
obj_name = " ๋ฐ ".join([obj for obj in objects if obj])
for category, result in results.items():
result["enhanced"] = enhance_with_llm(result["base"], obj_name, category)
return results
##############################################################################
# User input (up to 3 keywords) + selected category -> transformation ideas
##############################################################################
def generate_transformations(text1, text2, text3, selected_category):
if text2 and text3:
results = generate_three_objects_interaction_for_category(text1, text2, text3, selected_category)
objects = [text1, text2, text3]
elif text2:
results = generate_two_objects_interaction_for_category(text1, text2, selected_category)
objects = [text1, text2]
else:
results = generate_single_object_transformation_for_category(text1, selected_category)
objects = [text1]
return enhance_descriptions(results, objects)
##############################################################################
# Result formatting
##############################################################################
def format_results(results):
formatted = ""
for category, result in results.items():
formatted += f"## {category}\n**๊ธฐ๋ณธ ์•„์ด๋””์–ด**: {result['base']}\n\n**ํ™•์žฅ๋œ ์•„์ด๋””์–ด**: {result['enhanced']}\n\n---\n\n"
return formatted
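# The markdown produced above looks roughly like:
#
#     ## ๊ณต๊ฐ„ ์ด๋™
#     **๊ธฐ๋ณธ ์•„์ด๋””์–ด**: ...
#
#     **ํ™•์žฅ๋œ ์•„์ด๋””์–ด**: ...
#
#     ---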
##############################################################################
# Gradio UI์—์„œ ํ˜ธ์ถœ๋  ํ•จ์ˆ˜ (ํ…์ŠคํŠธ ์•„์ด๋””์–ด๋งŒ ์ƒ์„ฑ)
##############################################################################
def process_inputs(text1, text2, text3, selected_category, progress=gr.Progress()):
text1 = text1.strip() if text1 else None
text2 = text2.strip() if text2 else None
text3 = text3.strip() if text3 else None
if not text1:
return "์˜ค๋ฅ˜: ์ตœ์†Œ ํ•˜๋‚˜์˜ ํ‚ค์›Œ๋“œ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
progress(0.05, desc="์•„์ด๋””์–ด ์ƒ์„ฑ ์ค€๋น„ ์ค‘...")
time.sleep(0.3)
progress(0.1, desc="์ฐฝ์˜์ ์ธ ์•„์ด๋””์–ด ์ƒ์„ฑ ์‹œ์ž‘...")
    # Generate ideas for the selected category
results = generate_transformations(text1, text2, text3, selected_category)
progress(0.8, desc="๊ฒฐ๊ณผ ํฌ๋งทํŒ… ์ค‘...")
formatted = format_results(results)
progress(1.0, desc="์™„๋ฃŒ!")
return formatted
##############################################################################
# Final function that generates the idea text and the design image together
##############################################################################
def process_all(text1, text2, text3, selected_category, progress=gr.Progress()):
idea_result = process_inputs(text1, text2, text3, selected_category, progress)
image_result = generate_design_image(
idea_result,
seed=42,
randomize_seed=True,
width=1024,
height=1024,
num_inference_steps=4
)
return idea_result, image_result
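# Note: the full formatted markdown from process_inputs (category heading, bold
# labels, and any error message) is passed verbatim as the image prompt above;
# this mirrors the current behaviour rather than extracting only the idea text.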
##############################################################################
# API key warning message
##############################################################################
def get_warning_message():
if not GEMINI_API_KEY:
return "โš ๏ธ ํ™˜๊ฒฝ ๋ณ€์ˆ˜ GEMINI_API_KEY๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. Gemini API ํ‚ค๋ฅผ ์„ค์ •ํ•˜์„ธ์š”."
return ""
##############################################################################
# Gradio UI
##############################################################################
with gr.Blocks(
title="ํ‚ค์›Œ๋“œ ๊ธฐ๋ฐ˜ ์ฐฝ์˜์  ๋ณ€ํ™” ์•„์ด๋””์–ด ๋ฐ ๋””์ž์ธ ์ƒ์„ฑ๊ธฐ",
theme=gr.themes.Soft(primary_hue="teal", secondary_hue="slate", neutral_hue="neutral")
) as demo:
gr.HTML("""
<style>
body {
background: linear-gradient(135deg, #e0eafc, #cfdef3);
font-family: 'Arial', sans-serif;
}
.gradio-container {
padding: 20px;
}
h1, h2 {
text-align: center;
}
h1 {
color: #333;
}
h2 {
color: #555;
}
.output {
background-color: #ffffff;
padding: 15px;
border-radius: 8px;
}
.gr-button {
background-color: #4CAF50;
color: white;
border: none;
border-radius: 4px;
padding: 8px 16px;
}
.progress-message {
color: #2196F3;
font-weight: bold;
margin-top: 10px;
}
</style>
""")
gr.Markdown("# ๐Ÿš€ ํ‚ค์›Œ๋“œ ๊ธฐ๋ฐ˜ ์ฐฝ์˜์  ๋ณ€ํ™” ์•„์ด๋””์–ด ๋ฐ ๋””์ž์ธ ์ƒ์„ฑ๊ธฐ")
gr.Markdown("์ž…๋ ฅํ•œ **ํ‚ค์›Œ๋“œ**(์ตœ๋Œ€ 3๊ฐœ)์™€ **์นดํ…Œ๊ณ ๋ฆฌ**๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ, ์ฐฝ์˜์ ์ธ ๋ชจ๋ธ/์ปจ์…‰/ํ˜•์ƒ ๋ณ€ํ™” ์•„์ด๋””์–ด๋ฅผ ์ƒ์„ฑํ•˜๊ณ , ํ•ด๋‹น ํ™•์žฅ ์•„์ด๋””์–ด๋ฅผ ํ”„๋กฌํ”„ํŠธ๋กœ ํ•˜์—ฌ ๋””์ž์ธ ์ด๋ฏธ์ง€๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค.")
warning = gr.Markdown(get_warning_message())
with gr.Row():
with gr.Column(scale=1):
text_input1 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 1 (ํ•„์ˆ˜)", placeholder="์˜ˆ: ์Šค๋งˆํŠธํฐ")
text_input2 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 2 (์„ ํƒ)", placeholder="์˜ˆ: ์ธ๊ณต์ง€๋Šฅ")
text_input3 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 3 (์„ ํƒ)", placeholder="์˜ˆ: ํ—ฌ์Šค์ผ€์–ด")
category_dropdown = gr.Dropdown(
label="์นดํ…Œ๊ณ ๋ฆฌ ์„ ํƒ",
choices=list(physical_transformation_categories.keys()),
value=list(physical_transformation_categories.keys())[0],
info="์ถœ๋ ฅํ•  ์นดํ…Œ๊ณ ๋ฆฌ๋ฅผ ์„ ํƒํ•˜์„ธ์š”."
)
status_msg = gr.Markdown("๐Ÿ’ก '์•„์ด๋””์–ด ์ƒ์„ฑํ•˜๊ธฐ' ๋ฒ„ํŠผ์„ ํด๋ฆญํ•˜๋ฉด ์„ ํƒํ•œ ์นดํ…Œ๊ณ ๋ฆฌ์— ํ•ด๋‹นํ•˜๋Š” ์•„์ด๋””์–ด์™€ ๋””์ž์ธ ์ด๋ฏธ์ง€๊ฐ€ ์ƒ์„ฑ๋ฉ๋‹ˆ๋‹ค.")
processing_indicator = gr.HTML("""
<div style="display: flex; justify-content: center; align-items: center; margin: 10px 0;">
<div style="border: 5px solid #f3f3f3; border-top: 5px solid #3498db; border-radius: 50%; width: 30px; height: 30px; animation: spin 2s linear infinite;"></div>
<p style="margin-left: 10px; font-weight: bold; color: #3498db;">์ฒ˜๋ฆฌ ์ค‘์ž…๋‹ˆ๋‹ค...</p>
</div>
<style>
@keyframes spin {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}
</style>
""", visible=False)
submit_button = gr.Button("์•„์ด๋””์–ด ์ƒ์„ฑํ•˜๊ธฐ", variant="primary")
with gr.Column(scale=2):
idea_output = gr.Markdown(label="์•„์ด๋””์–ด ๊ฒฐ๊ณผ")
generated_image = gr.Image(label="์ƒ์„ฑ๋œ ๋””์ž์ธ ์ด๋ฏธ์ง€", type="pil")
    # Examples
gr.Examples(
examples=[
["์Šค๋งˆํŠธํฐ", "", "", list(physical_transformation_categories.keys())[0]],
["์ž๋™์ฐจ", "", "", list(physical_transformation_categories.keys())[0]],
["์ž๋™์ฐจ", "์ธ๊ณต์ง€๋Šฅ", "", list(physical_transformation_categories.keys())[0]],
["๋“œ๋ก ", "์ธ๊ณต์ง€๋Šฅ", "", list(physical_transformation_categories.keys())[0]],
["์šด๋™ํ™”", "์›จ์–ด๋Ÿฌ๋ธ”", "๊ฑด๊ฐ•", list(physical_transformation_categories.keys())[0]],
],
inputs=[text_input1, text_input2, text_input3, category_dropdown],
)
# ์ฒ˜๋ฆฌ์ค‘ ์•„์ด์ฝ˜ ๋ณด์ด๊ธฐ
def show_processing_indicator():
return gr.update(visible=True)
# ์ฒ˜๋ฆฌ์ค‘ ์•„์ด์ฝ˜ ์ˆจ๊ธฐ๊ธฐ
def hide_processing_indicator():
return gr.update(visible=False)
    # Processing logic on button click
submit_button.click(
fn=show_processing_indicator,
inputs=None,
outputs=processing_indicator
).then(
fn=process_all,
inputs=[text_input1, text_input2, text_input3, category_dropdown],
outputs=[idea_output, generated_image]
).then(
fn=hide_processing_indicator,
inputs=None,
outputs=processing_indicator
)
# ๋ฉ”์ธ ์‹คํ–‰
if __name__ == "__main__":
demo.launch(debug=True)