Update app.py
app.py
CHANGED
@@ -1,12 +1,9 @@
 import json
 import os
 import time
-import uuid
 import tempfile
 from PIL import Image
 import gradio as gr
-import base64
-import mimetypes
 import logging
 from io import BytesIO
 
@@ -30,8 +27,8 @@ def save_binary_file(file_name, data):
     logger.debug(f"File saved: {file_name}")
 
 
-def generate(text, original_image_path, background_image_path=None, style_image_…
-    logger.debug(f"…
+def merge_product_and_person(person_img_path, product_img_path, prompt, model="gemini-2.0-flash-exp-image-generation"):
+    logger.debug(f"merge_product_and_person started - prompt: '{prompt}'")
 
     try:
         # Load the API key from the environment variable
@@ -46,33 +43,12 @@ def generate(text, original_image_path, background_image_path=None, style_image_…
         logger.debug("Gemini client initialized.")
 
         # Convert to PIL image objects
-        original_img = Image.open(original_image_path)
+        person_img = Image.open(person_img_path)
+        product_img = Image.open(product_img_path)
 
         # Build the contents list (following the official docs)
-        …
-        # Spell out the text prompt explicitly
-        prompt = text
-        if background_image_path and "background" not in text.lower():
-            prompt += " Please completely replace the background of the original image with the second uploaded image. Update the image and show the result."
-        if style_image_path and "style" not in text.lower():
-            prompt += " Please apply the style of the third image throughout."
-
-        contents.append(prompt)
-        contents.append(original_img)
-
-        # Add the background image (if provided)
-        if background_image_path:
-            background_img = Image.open(background_image_path)
-            contents.append(background_img)
-            logger.debug("Background image added")
-
-        # Add the style image (if provided)
-        if style_image_path:
-            style_img = Image.open(style_image_path)
-            contents.append(style_img)
-            logger.debug("Style image added")
-
+        # Order: person image, product image, prompt text
+        contents = [person_img, product_img, prompt]
         logger.debug(f"Contents created: {len(contents)} items")
 
         # Generation settings
@@ -81,7 +57,7 @@ def generate(text, original_image_path, background_image_path=None, style_image_…
             top_p=0.95,
             top_k=40,
             max_output_tokens=8192,
-            response_modalities=["text", "image"],
+            response_modalities=["text", "image"],
         )
         logger.debug(f"Generation config: {generate_content_config}")
 
@@ -89,7 +65,7 @@ def generate(text, original_image_path, background_image_path=None, style_image_…
             temp_path = tmp.name
         logger.debug(f"Temporary file created: {temp_path}")
 
-        # …
+        # Generate the image with a single request
         response = client.models.generate_content(
            model=model,
            contents=contents,
@@ -98,12 +74,14 @@ def generate(text, original_image_path, background_image_path=None, style_image_…
 
         logger.debug("Starting to process the response...")
 
-        # From the response …
+        # Extract the image and text from the response
         image_saved = False
+        response_text = ""
+
         for part in response.candidates[0].content.parts:
             if hasattr(part, 'text') and part.text:
+                response_text += part.text
                 logger.info(f"Received text: {part.text}")
-                print(part.text)
             elif hasattr(part, 'inline_data') and part.inline_data:
                 save_binary_file(temp_path, part.inline_data.data)
                 logger.info(f"File with MIME type {part.inline_data.mime_type} saved: {temp_path}")
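For reference, the request/response pattern these hunks rely on can be exercised outside the app. The sketch below is not part of this commit: it assumes the google-genai SDK (genai.Client, types.GenerateContentConfig) and a GEMINI_API_KEY environment variable, since the client setup itself sits in unchanged lines, and the file names and temperature value are placeholders.

import os
from PIL import Image
from google import genai
from google.genai import types

client = genai.Client(api_key=os.environ["GEMINI_API_KEY"])

# Placeholder inputs; the app passes the two uploaded images plus the prompt.
person_img = Image.open("person.png")
product_img = Image.open("product.png")
prompt = "Make a natural composite image showing this person using this product."

config = types.GenerateContentConfig(
    temperature=1.0,  # illustrative value; the app's own setting is in unchanged lines
    top_p=0.95,
    top_k=40,
    max_output_tokens=8192,
    response_modalities=["text", "image"],  # ask for both an image and text back
)

response = client.models.generate_content(
    model="gemini-2.0-flash-exp-image-generation",
    contents=[person_img, product_img, prompt],  # PIL images and strings can be mixed
    config=config,
)

# Image data arrives as inline bytes; text parts carry any commentary.
for part in response.candidates[0].content.parts:
    if getattr(part, "text", None):
        print(part.text)
    elif getattr(part, "inline_data", None):
        with open("composite.png", "wb") as f:
            f.write(part.inline_data.data)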
@@ -111,84 +89,67 @@ def generate(text, original_image_path, background_image_path=None, style_image_…
 
         if not image_saved:
             logger.warning("No image was generated.")
-            return None
+            return None, response_text
 
         logger.debug("Image generation finished.")
-        return temp_path
+        return temp_path, response_text
 
     except Exception as e:
         logger.exception("Error while generating the image:")
-        return None  # Return None on error
+        return None, str(e)  # Return None and the error message on error
 
 
-def …
-    logger.debug(f"…
+def process_images_and_prompt(person_pil, product_pil, prompt):
+    logger.debug(f"process_images_and_prompt started - prompt: '{prompt}'")
     try:
-        # …
-        …
-        if background_path and style_path:
-            prompt = "Please replace the background with the second image while keeping the person, and apply the style of the third image."
-        elif background_path:
-            prompt = "Please replace the background with the second image while keeping the person."
-        elif style_path:
-            prompt = "Please apply the style of the second image to the original image."
-
-        model = "gemini-2.0-flash-exp-image-generation"
-
-        gemma_edited_image_path = generate(
-            text=prompt,
-            original_image_path=original_path,
-            background_image_path=background_path,
-            style_image_path=style_path,
-            model=model
+        # Set a default prompt (when the user leaves it empty)
+        if not prompt or not prompt.strip():
+            prompt = "Make a natural composite image showing this person using this product. Make sure the product is clearly visible."
+
+        # If the prompt contains no English, append an English hint (for better results)
+        if not any(ord(c) < 128 for c in prompt):
+            prompt += " Create a realistic composite image of this person with this product."
+
+        # Save the uploaded images to temporary files
+        with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp_person:
+            person_path = tmp_person.name
+            person_pil.save(person_path)
+            logger.debug(f"Person image saved: {person_path}")
+
+        with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp_product:
+            product_path = tmp_product.name
+            product_pil.save(product_path)
+            logger.debug(f"Product image saved: {product_path}")
+
+        # Run the image compositing
+        result_path, response_text = merge_product_and_person(
+            person_img_path=person_path,
+            product_img_path=product_path,
+            prompt=prompt
         )
 
-
-
-        …
+        # Return the image and clean up the temporary files
+        if result_path:
+            logger.debug(f"Image generated. Path: {result_path}")
+            result_img = Image.open(result_path)
             if result_img.mode == "RGBA":
                 result_img = result_img.convert("RGB")
 
             # Clean up temporary files
             try:
-                os.unlink(original_path)
-                if background_path:
-                    os.unlink(background_path)
-                if style_path:
-                    os.unlink(style_path)
+                os.unlink(person_path)
+                os.unlink(product_path)
             except Exception as e:
                 logger.warning(f"Error while deleting temporary files: {str(e)}")
 
-            return [result_img]
+            return [result_img], response_text
         else:
-            logger.error("…
-            return []  # Return an empty list on error
+            logger.error("merge_product_and_person returned None.")
+            return [], response_text  # Return an empty list on error
 
     except Exception as e:
-        logger.exception("…
-        return []  # Return an empty …
+        logger.exception("Error in process_images_and_prompt:")
+        return [], str(e)  # Return an empty list and the error message on error
 
 
 # --- Gradio interface setup ---
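One detail of the new process_images_and_prompt worth calling out: the English-hint check, if not any(ord(c) < 128 for c in prompt), only fires when the prompt contains no ASCII characters at all, and spaces and ASCII punctuation count as ASCII. A hypothetical standalone version of the check, not part of app.py:

def ensure_english_hint(prompt: str) -> str:
    # Append an English sentence only when the prompt has no ASCII characters at all.
    if not any(ord(c) < 128 for c in prompt):
        prompt += " Create a realistic composite image of this person with this product."
    return prompt

print(ensure_english_hint("상품강조"))      # no ASCII characters -> hint appended
print(ensure_english_hint("상품 강조"))     # the space is ASCII -> returned unchanged
print(ensure_english_hint("Show the product clearly"))  # already English -> unchanged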
@@ -200,46 +161,47 @@ with gr.Blocks() as demo:
             <img src="https://www.gstatic.com/lamda/images/gemini_favicon_f069958c85030456e93de685481c559f160ea06b.png" style="width: 100px; height: 100px;">
         </div>
         <div>
-            <h1>Gemini-based image …
+            <h1>Product + person image compositing with Gemini</h1>
             <p>The Gemini API key is set via the environment variable (GEMINI_API_KEY).</p>
         </div>
     </div>
     """
     )
-    gr.Markdown("…
+    gr.Markdown("Upload a person image and a product image, then describe how they should be composited.")
 
     with gr.Row():
         with gr.Column():
-            …
-            …
-            style_input = gr.Image(type="pil", label="Style image", image_mode="RGB")
+            person_input = gr.Image(type="pil", label="Person image", image_mode="RGB")
+            product_input = gr.Image(type="pil", label="Product image", image_mode="RGB")
             prompt_input = gr.Textbox(
                 lines=2,
-                placeholder="…
-                label="…
+                placeholder="Describe how to composite the images (e.g. 'this person holding the product' or 'this person using this product')",
+                label="Compositing instructions"
             )
-            submit_btn = gr.Button("Image …
+            submit_btn = gr.Button("Run image compositing")
         with gr.Column():
-            output_gallery = gr.Gallery(label="…
-            output_text = gr.Textbox(label="…
+            output_gallery = gr.Gallery(label="Composite result")
+            output_text = gr.Textbox(label="AI response text", visible=True)
 
     submit_btn.click(
-        fn=…
-        inputs=[…
-        outputs=output_gallery,
+        fn=process_images_and_prompt,
+        inputs=[person_input, product_input, prompt_input],
+        outputs=[output_gallery, output_text],
     )
 
     gr.HTML("""
     <div style="margin-top: 20px; padding: 10px; background-color: #f8f9fa; border-radius: 8px;">
         <h3>Usage tips:</h3>
         <ul>
-            <li><strong>…
-            <li><strong>…
-            <li><strong>…
+            <li><strong>Natural compositing:</strong> "Create a natural-looking image of this person using this product."</li>
+            <li><strong>Specifying placement:</strong> "Show this person holding this product in their hand."</li>
+            <li><strong>Emphasizing the product:</strong> "Show this person using this product, and make sure the product is clearly visible."</li>
             <li><strong>English prompts:</strong> For better results, try using English and Korean together.</li>
+            <li><strong>Specific instructions:</strong> The more concretely you describe the composition, the better the results.</li>
         </ul>
     </div>
     """)
 
 # --- Run ---
-…
+if __name__ == "__main__":
+    demo.launch(share=True)
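As a quick smoke test of the wiring above, the handler can also be called directly, without the Gradio UI. A minimal sketch, assuming app.py is importable, GEMINI_API_KEY is set, and the image paths are placeholders:

from PIL import Image
from app import process_images_and_prompt  # the handler wired to submit_btn.click above

person = Image.open("person.png")
product = Image.open("product.png")

images, text = process_images_and_prompt(
    person, product, "Show this person holding the product, with the product clearly visible."
)
if images:
    images[0].save("composite.png")  # on success the gallery list holds one PIL image
print(text)  # model text on success, or an error message on failure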