ginipick committed · verified
Commit f09c591 · 1 Parent(s): 10414b7

Update app.py

Files changed (1)
  1. app.py +26 -23
app.py CHANGED
@@ -31,8 +31,8 @@ os.environ["TRANSFORMERS_CACHE"] = CACHE_PATH
 os.environ["HF_HUB_CACHE"] = CACHE_PATH
 os.environ["HF_HOME"] = CACHE_PATH
 
-# To use Google GenAI, the following environment variable is required.
-# os.environ["GAPI_TOKEN"] = "<YOUR_GOOGLE_GENAI_API_KEY>"
+# (Example) Using Google GenAI:
+# export GAPI_TOKEN="<YOUR_GOOGLE_GENAI_API_KEY>"
 
 # Simple timer class for measuring task duration
 class timer:
@@ -82,13 +82,21 @@ def generate_by_google_genai(text, file_name, model="gemini-2.0-flash-exp"):
     - file_name: path to the original image (e.g. .png)
     - model: name of the gemini model to use
     """
-    # 1) Initialize the Google client
-    client = genai.Client(api_key=os.getenv("GAPI_TOKEN"))
+    # (1) Read the API key from the environment variable (required)
+    api_key = os.getenv("GAPI_TOKEN", None)
+    if not api_key:
+        raise ValueError(
+            "The GAPI_TOKEN environment variable is not set. "
+            "GAPI_TOKEN is required to use the Google GenAI API."
+        )
+
+    # (2) Initialize the Google client
+    client = genai.Client(api_key=api_key)
 
-    # 2) Upload the image
+    # (3) Upload the image
     files = [client.files.upload(file=file_name)]
 
-    # 3) Prepare the Content to pass to gemini (image + prompt)
+    # (4) Prepare the Content to pass to gemini (image + prompt)
     contents = [
         types.Content(
             role="user",
@@ -102,7 +110,7 @@ def generate_by_google_genai(text, file_name, model="gemini-2.0-flash-exp"):
         ),
     ]
 
-    # 4) Generation/conversion settings
+    # (5) Generation/conversion settings
     generate_content_config = types.GenerateContentConfig(
         temperature=1,
         top_p=0.95,
@@ -133,18 +141,21 @@ def generate_by_google_genai(text, file_name, model="gemini-2.0-flash-exp"):
             save_binary_file(temp_path, candidate.inline_data.data)
             print(f"File of mime type {candidate.inline_data.mime_type} saved to: {temp_path}")
             image_path = temp_path
-            break  # stop as soon as an image arrives
+            # once an image has been generated, we normally break here
+            break
         else:
-            # otherwise, accumulate the text
+            # accumulate text for the (rare) case where only text, and no image, is returned
             text_response += chunk.text + "\n"
 
-    # remove the uploaded file (google.genai.files.File) object
+    # remove the uploaded File object
     del files
 
     return image_path, text_response
 
 #######################################
-# 3. Gradio functions: (1) generate an image with FLUX -> (2) replace the text with Google GenAI
+# 3. Gradio functions:
+#    (1) generate an image with FLUX ->
+#    (2) replace the text with Google GenAI
 #######################################
 
 def generate_initial_image(prompt, text, height, width, steps, scale, seed):
@@ -153,10 +164,7 @@ def generate_initial_image(prompt, text, height, width, steps, scale, seed):
     prompt: prompt describing the image background/scene/style
     text: the phrase that should actually appear in the image (e.g. "안녕하세요", "Hello world")
     """
-    # To get text into the image, it is important to request the phrase directly in the prompt.
-    # Depending on the diffusion model it may not be rendered faithfully, so the more specific the wording, the better.
-    # e.g. "A poster with large bold Korean text that says '안녕하세요' in red color ..."
-    # Here we simply show an example of appending the text to the prompt.
+    # Ask the diffusion model to render the given text inside the image
     combined_prompt = f"{prompt} with clear readable text that says '{text}'"
 
     with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16), timer("inference"):
@@ -200,6 +208,7 @@ def change_text_in_image(original_image, new_text):
         return None, text_response
 
     except Exception as e:
+        # use gr.Error so the error is shown in the Gradio UI
         raise gr.Error(f"Error: {e}")
 
 
@@ -212,12 +221,10 @@ with gr.Blocks(title="Flux + Google GenAI Text Replacement") as demo:
         """
         # Flux-based image generation + text replacement via Google GenAI
        **This demo shows the two steps below.**
-
        1) **Generate an image with a diffusion model (FluxPipeline).**
           - At this step, it attempts to render the user-specified text inside the image.
        2) **Pass the generated image to a Google GenAI (gemini) model**,
           - changing only the text portion of the image to a different string.
-
        ---
        """
    )
@@ -273,9 +280,5 @@ with gr.Blocks(title="Flux + Google GenAI Text Replacement") as demo:
        outputs=[output_img, output_txt]
    )
 
-if __name__ == "__main__":
-    # remove this line
-    # demo.queue(concurrency_count=1, max_size=20).launch()
-
-    # change it to this
-    demo.launch(max_threads=20)  # set the total number of workers
+# launch Gradio
+demo.launch(max_threads=20)
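
For reference, a minimal sketch of how the updated generate_by_google_genai is expected to be called after this change. The function name, parameters, and default model string are taken from the diff above; the key value, file name, and instruction text are placeholders, and the import assumes app.py is on the import path (importing it will also load the FLUX pipeline and build the Gradio demo).

import os

# GAPI_TOKEN must be set before the call; otherwise the new guard raises ValueError.
os.environ["GAPI_TOKEN"] = "<YOUR_GOOGLE_GENAI_API_KEY>"  # placeholder

from app import generate_by_google_genai  # assumption: app.py is importable

# Placeholders for the image produced by the FLUX step and the replacement instruction.
image_path, text_response = generate_by_google_genai(
    text="Change the text in this image to 'Hello world'",
    file_name="input.png",
    model="gemini-2.0-flash-exp",
)
print(image_path or text_response)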