import spaces
import gradio as gr
import torch
from PIL import Image
from diffusers import DiffusionPipeline
import random
from transformers import pipeline

torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
torch.backends.cuda.matmul.allow_tf32 = True
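# Deterministic cuDNN kernels make results reproducible for a fixed seed,
# while TF32 matmuls speed up inference on Ampere-class and newer GPUs.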
# Initialize the Korean-to-English translation model
translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")

# Base model and LoRA repositories
base_model = "black-forest-labs/FLUX.1-dev"
model_lora_repo = "Motas/Flux_Fashion_Photography_Style"  # fashion-model LoRA
clothes_lora_repo = "prithivMLmods/Canopus-Clothing-Flux-LoRA"  # clothing LoRA

pipe = DiffusionPipeline.from_pretrained(base_model, torch_dtype=torch.bfloat16)
pipe.to("cuda")
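# Note (not part of the original Space): FLUX.1-dev is a large model, so this setup
# assumes a GPU with generous memory. On smaller cards, diffusers'
# pipe.enable_model_cpu_offload() could be used instead of pipe.to("cuda").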
MAX_SEED = 2**32 - 1

# Example prompts for each mode
model_examples = [
    "professional fashion model wearing elegant black dress in studio lighting",
    "fashion model in casual street wear, urban background",
    "high fashion model in avant-garde outfit on runway"
]

clothes_examples = [
    "luxurious red evening gown with detailed embroidery",
    "casual denim jacket with vintage wash",
    "modern minimalist white blazer with clean lines"
]
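# Each example is a single prompt string; the gr.Examples block in the UI below
# feeds the selected string straight into the prompt textbox.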
@spaces.GPU()  # required on ZeroGPU Spaces; this is what the `spaces` import is for
def generate_fashion(prompt, mode, cfg_scale, steps, randomize_seed, seed, width, height, lora_scale,
                     progress=gr.Progress(track_tqdm=True)):
    # Detect Korean input and translate it to English for the prompt
    def contains_korean(text):
        # U+AC00..U+D7A3 is the Hangul Syllables block
        return any('\uac00' <= char <= '\ud7a3' for char in text)

    if contains_korean(prompt):
        actual_prompt = translator(prompt)[0]['translation_text']
    else:
        actual_prompt = prompt
    # Pick the LoRA and trigger words for the selected mode
    pipe.unload_lora_weights()  # drop any previously loaded adapter before switching
    if mode == "Fashion Model Generation":
        pipe.load_lora_weights(model_lora_repo)
        trigger_word = "fashion photography, professional model"
    else:
        pipe.load_lora_weights(clothes_lora_repo)
        trigger_word = "upper clothing, fashion item"

    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    seed = int(seed)  # sliders may deliver floats; the generator needs an int
    generator = torch.Generator(device="cuda").manual_seed(seed)
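    # With the deterministic cuDNN settings above, the same seed, prompt, and
    # settings should reproduce the same image on the same hardware/software stack.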
    # track_tqdm=True mirrors the diffusers per-step progress bar in the Gradio UI,
    # so no manual step loop is needed here.
    progress(0, desc="Starting fashion generation...")
    image = pipe(
        prompt=f"{actual_prompt} {trigger_word}",
        num_inference_steps=int(steps),
        guidance_scale=cfg_scale,
        width=int(width),
        height=int(height),
        generator=generator,
        joint_attention_kwargs={"scale": lora_scale},  # LoRA strength
    ).images[0]

    progress(1.0, desc="Completed!")
    return image, seed
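# generate_fashion returns (image, seed): the seed actually used is echoed back
# so the Seed slider in the UI can display it, which matters when
# "Randomize seed" is checked.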
def update_examples(mode):
    # Swap the visible example prompts when the mode changes by updating the
    # gr.Dataset that backs the Examples block (assumes Gradio 4.x behavior).
    examples_list = model_examples if mode == "Fashion Model Generation" else clothes_examples
    return gr.Dataset(samples=[[e] for e in examples_list])
with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange") as app:
    gr.Markdown("# Fashion AI Studio")
    gr.Markdown("Generate fashion models and clothing with AI")
    with gr.Column():
        # Mode selection
        mode = gr.Radio(
            choices=["Fashion Model Generation", "Fashion Clothing Generation"],
            label="Generation Mode",
            value="Fashion Model Generation"
        )

        # Prompt input
        prompt = gr.TextArea(
            label="Fashion description (Korean or English)",
            placeholder="Describe the fashion model or clothing item...",
            lines=3
        )
        # Example prompts
        with gr.Column() as example_container:
            examples = gr.Examples(
                examples=model_examples,
                inputs=prompt,
                label="Example prompts"
            )

        # Output image and generate button
        result = gr.Image(label="Generated image")
        generate_button = gr.Button("Generate Image")
        # Advanced settings
        with gr.Accordion("Advanced Settings", open=False):
            with gr.Row():
                cfg_scale = gr.Slider(label="CFG Scale", minimum=1, maximum=20, value=7.0)
                steps = gr.Slider(label="Steps", minimum=1, maximum=100, step=1, value=30)
                lora_scale = gr.Slider(label="LoRA Scale", minimum=0, maximum=1, value=0.85)
            with gr.Row():
                # step=16 keeps sizes FLUX-friendly (multiples of 16)
                width = gr.Slider(label="Width", minimum=256, maximum=1536, step=16, value=512)
                height = gr.Slider(label="Height", minimum=256, maximum=1536, step=16, value=768)
            with gr.Row():
                randomize_seed = gr.Checkbox(True, label="Randomize seed")
                seed = gr.Slider(label="Seed", minimum=0, maximum=MAX_SEED, step=1, value=42)
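        # CFG scale trades prompt adherence against variety, more steps generally
        # mean cleaner but slower results, and LoRA scale controls how strongly
        # the selected adapter's style is applied.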
    # Event handlers
    mode.change(
        fn=update_examples,
        inputs=[mode],
        outputs=[examples.dataset]  # update the dataset component behind gr.Examples
    )

    generate_button.click(
        generate_fashion,
        inputs=[prompt, mode, cfg_scale, steps, randomize_seed, seed, width, height, lora_scale],
        outputs=[result, seed]
    )
if __name__ == "__main__":
    app.launch(share=True)