import json
import os
import time
import uuid
import tempfile
from PIL import Image
import gradio as gr
import base64
import mimetypes
import logging

from google import genai
from google.genai import types

from dotenv import load_dotenv

load_dotenv()

logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

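# Gradio front end for image editing with the Gemini API: the user supplies an
# original image, optional background and style images, and a text prompt;
# process_image_and_prompt() saves the images to temporary files and generate()
# uploads them and streams the edited result back from the model.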
def save_binary_file(file_name, data):
    logger.debug(f"Saving binary data to file: {file_name}")
    with open(file_name, "wb") as f:
        f.write(data)
    logger.debug(f"File saved: {file_name}")

def generate(text, file_name, background_file=None, style_file=None, model="gemini-2.0-flash-exp-image-generation"):
    logger.debug(f"generate started - text: '{text}', file name: '{file_name}', model: '{model}'")

    try:
        effective_api_key = os.environ.get("GEMINI_API_KEY")
        if effective_api_key:
            logger.debug("Loaded API key from environment variable.")
        else:
            logger.error("API key is not set in the environment variables.")
            raise ValueError("An API key is required.")

        client = genai.Client(api_key=effective_api_key)
        logger.debug("Gemini client initialized.")
        uploaded_files = []
        uploaded_files.append(client.files.upload(file=file_name))
        logger.debug(f"Original file uploaded. URI: {uploaded_files[0].uri}, MIME type: {uploaded_files[0].mime_type}")

        if background_file is not None:
            bg_file = client.files.upload(file=background_file)
            uploaded_files.append(bg_file)
            logger.debug(f"Background file uploaded. URI: {bg_file.uri}, MIME type: {bg_file.mime_type}")

        if style_file is not None:
            style_uploaded = client.files.upload(file=style_file)
            uploaded_files.append(style_uploaded)
            logger.debug(f"Style file uploaded. URI: {style_uploaded.uri}, MIME type: {style_uploaded.mime_type}")

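        # Build the request parts: image references first, then the text prompt.
        # uploaded_files[0] is always the original; the style image sits at index 2
        # when a background was uploaded and at index 1 otherwise.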
        parts = []

        parts.append(
            types.Part.from_uri(
                file_uri=uploaded_files[0].uri,
                mime_type=uploaded_files[0].mime_type,
            )
        )

        if background_file is not None:
            parts.append(
                types.Part.from_uri(
                    file_uri=uploaded_files[1].uri,
                    mime_type=uploaded_files[1].mime_type,
                )
            )

        if style_file is not None:
            style_index = 2 if background_file is not None else 1
            parts.append(
                types.Part.from_uri(
                    file_uri=uploaded_files[style_index].uri,
                    mime_type=uploaded_files[style_index].mime_type,
                )
            )

        parts.append(types.Part.from_text(text=text))

        contents = [
            types.Content(
                role="user",
                parts=parts,
            ),
        ]
        logger.debug(f"Contents object created: {contents}")

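        # Request both image and text response modalities so the model can return
        # inline image data for the edit as well as any accompanying text.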
        generate_content_config = types.GenerateContentConfig(
            temperature=1,
            top_p=0.95,
            top_k=40,
            max_output_tokens=8192,
            response_modalities=[
                "image",
                "text",
            ],
            response_mime_type="text/plain",
        )
        logger.debug(f"Generation config: {generate_content_config}")

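        # delete=False keeps the temporary PNG on disk after the with-block, so the
        # streaming loop below can write to it and the path can be returned.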
        with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp:
            temp_path = tmp.name
            logger.debug(f"Temporary file created: {temp_path}")

        response_stream = client.models.generate_content_stream(
            model=model,
            contents=contents,
            config=generate_content_config,
        )

        logger.debug("Processing response stream...")
        for chunk in response_stream:
            if not chunk.candidates or not chunk.candidates[0].content or not chunk.candidates[0].content.parts:
                logger.warning("Chunk has no candidates, content, or parts; skipping.")
                continue

            inline_data = chunk.candidates[0].content.parts[0].inline_data
            if inline_data:
                save_binary_file(temp_path, inline_data.data)
                logger.info(f"File of MIME type {inline_data.mime_type} saved to: {temp_path} (prompt: {text})")
            else:
                logger.info(f"Received text: {chunk.text}")
                print(chunk.text)

            logger.debug(f"Raw chunk: {chunk}")

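        # This only drops the local references; the uploads themselves remain in the
        # Files API until they expire (they could presumably be removed earlier with
        # client.files.delete, but that is not done here).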
        del uploaded_files
        logger.debug("Uploaded file references released.")
        return temp_path

    except Exception as e:
        logger.exception("Error during image generation:")
        return None

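# Bridges the Gradio UI (PIL images) and generate() (file paths): each PIL image is
# written to a temporary PNG before being handed to the Gemini pipeline.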
def process_image_and_prompt(original_pil, prompt, background_pil=None, style_pil=None):
    logger.debug(f"process_image_and_prompt started - prompt: '{prompt}'")
    try:
        with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp:
            original_path = tmp.name
            original_pil.save(original_path)
            logger.debug(f"Original image saved: {original_path}")

        background_path = None
        if background_pil is not None:
            with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp_bg:
                background_path = tmp_bg.name
                background_pil.save(background_path)
                logger.debug(f"Background image saved: {background_path}")

        style_path = None
        if style_pil is not None:
            with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp_style:
                style_path = tmp_style.name
                style_pil.save(style_path)
                logger.debug(f"Style image saved: {style_path}")

        input_text = prompt
        model = "gemini-2.0-flash-exp-image-generation"

        gemma_edited_image_path = generate(text=input_text,
                                           file_name=original_path,
                                           background_file=background_path,
                                           style_file=style_path,
                                           model=model)

        if gemma_edited_image_path:
            logger.debug(f"Image generation finished. Path: {gemma_edited_image_path}")
            result_img = Image.open(gemma_edited_image_path)
            # Flatten RGBA output to RGB before returning it to the gallery.
            if result_img.mode == "RGBA":
                result_img = result_img.convert("RGB")
            return [result_img]
        else:
            logger.error("generate returned None.")
            return []

    except Exception as e:
        logger.exception("Error in process_image_and_prompt:")
        return []

with gr.Blocks() as demo:
    gr.HTML(
        """
    <div style='display: flex; align-items: center; justify-content: center; gap: 20px'>
        <div style="background-color: var(--block-background-fill); border-radius: 8px">
            <img src="https://www.gstatic.com/lamda/images/gemini_favicon_f069958c85030456e93de685481c559f160ea06b.png" style="width: 100px; height: 100px;">
        </div>
        <div>
            <h1>Image Editing with Gemini</h1>
            <p>The Gemini API key is read from the GEMINI_API_KEY environment variable.</p>
        </div>
    </div>
    """
    )
    gr.Markdown("Upload the original, background, and style images, then describe the edit you want.")

    with gr.Row():
        with gr.Column():
            original_input = gr.Image(type="pil", label="Original Image", image_mode="RGBA")
            background_input = gr.Image(type="pil", label="Background Image", image_mode="RGBA")
            style_input = gr.Image(type="pil", label="Style Image", image_mode="RGBA")
            prompt_input = gr.Textbox(
                lines=2,
                placeholder="Describe the edit to apply...",
                label="Edit Prompt"
            )
            submit_btn = gr.Button("Run Image Edit")
        with gr.Column():
            output_gallery = gr.Gallery(label="Edit Results")

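    # The order of inputs must match the signature of process_image_and_prompt:
    # (original_pil, prompt, background_pil, style_pil).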
    submit_btn.click(
        fn=process_image_and_prompt,
        inputs=[original_input, prompt_input, background_input, style_input],
        outputs=output_gallery,
    )

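# Smoke test: run the pipeline once with dummy images before launching the UI.
# This makes a real API call at startup, so it will log an error if GEMINI_API_KEY
# is missing or the request fails.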
dummy_original = Image.new("RGBA", (100, 100), color="red")
dummy_background = Image.new("RGBA", (100, 100), color="green")
dummy_style = Image.new("RGBA", (100, 100), color="blue")
dummy_prompt = "Image edit: the original is red, the background is green, the style is blue"

logger.info("Calling process_image_and_prompt directly...")
result = process_image_and_prompt(dummy_original, dummy_prompt, dummy_background, dummy_style)

if result:
    logger.info(f"Direct call succeeded. Result: {result}")
else:
    logger.error("Direct call failed.")

demo.launch(share=True)