Kims12 committed on
Commit
d86f643
·
verified ·
1 Parent(s): 639b571

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -46
app.py CHANGED
@@ -18,53 +18,52 @@ load_dotenv()
18
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
19
  logger = logging.getLogger(__name__)
20
 
21
- # LLM 모델 초기화 (번역 기능용)
22
- def initialize_gemini():
23
- api_key = os.environ.get("GEMINI_API_KEY")
24
- if not api_key:
25
- logger.error("GEMINI_API_KEY๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค.")
26
- return None
27
-
28
- # genai ํด๋ผ์ด์–ธํŠธ ์ƒ์„ฑ
29
- client = genai.Client(api_key=api_key)
30
-
31
- # GenerativeModel ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ
32
- model = client.models.get_model("gemini-2.0-flash")
33
-
34
- return model
35
-
36
- # LLM 모델로 한국어 프롬프트를 영어로 번역
37
- def translate_to_english(korean_prompt, model):
38
- if not korean_prompt or not model:
39
- return korean_prompt
40
-
41
  try:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  translation_prompt = f"""
43
- ๋‹ค์Œ ํ•œ๊ตญ์–ด ํ…์ŠคํŠธ๋ฅผ ์˜์–ด๋กœ ์ •ํ™•ํ•˜๊ฒŒ ๋ฒˆ์—ญํ•ด์ฃผ์„ธ์š”. ๋งฅ๋ฝ๊ณผ ์˜๋ฏธ๋ฅผ ์ตœ๋Œ€ํ•œ ๋ณด์กดํ•˜์„ธ์š”.
44
- ๋ฒˆ์—ญํ•  ํ…์ŠคํŠธ: {korean_prompt}
45
 
46
- ๋ฒˆ์—ญ๋งŒ ์ œ๊ณตํ•˜๊ณ  ๋‹ค๋ฅธ ์„ค๋ช…์€ ํ•˜์ง€ ๋งˆ์„ธ์š”.
 
 
47
  """
48
 
49
- # ์ƒˆ๋กœ์šด API ํ˜•์‹์— ๋งž๊ฒŒ ์ˆ˜์ •
50
- response = model.generate_content(
51
- types.GenerateContentRequest(
52
- contents=[types.Content(parts=[types.Part(text=translation_prompt)])],
53
- generation_config=types.GenerationConfig(
54
- temperature=0.2,
55
- max_output_tokens=1024,
56
- top_p=0.9,
57
- )
58
- )
59
- )
60
 
61
  # ์‘๋‹ต์—์„œ ํ…์ŠคํŠธ ์ถ”์ถœ
62
- english_prompt = response.candidates[0].content.parts[0].text.strip()
63
- logger.info(f"๋ฒˆ์—ญ ๊ฒฐ๊ณผ: {english_prompt}")
64
- return english_prompt
 
 
 
 
65
  except Exception as e:
66
  logger.exception(f"๋ฒˆ์—ญ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}")
67
- return korean_prompt # ์˜ค๋ฅ˜ ๋ฐœ์ƒ ์‹œ ์›๋ณธ ํ•œ๊ตญ์–ด ํ”„๋กฌํ”„ํŠธ ๋ฐ˜ํ™˜
68
 
69
  def save_binary_file(file_name, data):
70
  with open(file_name, "wb") as f:
@@ -134,7 +133,7 @@ def preprocess_prompt(prompt, image1=None, image2=None, image3=None):
134
 
135
  return prompt
136
 
137
- def generate_with_images(prompt, images, gemini_model):
138
  """
139
  ๊ณต์‹ ๋ฌธ์„œ์— ๊ธฐ๋ฐ˜ํ•œ ์˜ฌ๋ฐ”๋ฅธ API ํ˜ธ์ถœ ๋ฐฉ์‹ ๊ตฌํ˜„
140
  ์žฌ์‹œ๋„ ๋กœ์ง ์ถ”๊ฐ€
@@ -153,7 +152,7 @@ def generate_with_images(prompt, images, gemini_model):
153
  client = genai.Client(api_key=api_key)
154
 
155
  # ํ”„๋กฌํ”„ํŠธ ๋ฒˆ์—ญ
156
- english_prompt = translate_to_english(prompt, gemini_model) if gemini_model else prompt
157
  logger.info(f"์›๋ณธ ํ”„๋กฌํ”„ํŠธ: {prompt}")
158
  logger.info(f"๋ฒˆ์—ญ๋œ ํ”„๋กฌํ”„ํŠธ: {english_prompt}")
159
 
@@ -222,7 +221,7 @@ def generate_with_images(prompt, images, gemini_model):
222
  logger.exception("์ด๋ฏธ์ง€ ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ:")
223
  return None, f"์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
224
 
225
- def process_images_with_prompt(image1, image2, image3, prompt, gemini_model):
226
  """
227
  3๊ฐœ์˜ ์ด๋ฏธ์ง€์™€ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ฒ˜๋ฆฌํ•˜๋Š” ํ•จ์ˆ˜
228
  """
@@ -250,7 +249,7 @@ def process_images_with_prompt(image1, image2, image3, prompt, gemini_model):
250
  prompt = preprocess_prompt(prompt, image1, image2, image3)
251
 
252
  # ์ด๋ฏธ์ง€ ์ƒ์„ฑ API ํ˜ธ์ถœ
253
- return generate_with_images(prompt, images, gemini_model)
254
 
255
  except Exception as e:
256
  logger.exception("์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ:")
@@ -272,9 +271,6 @@ def update_prompt_from_function(function_choice):
272
 
273
  # Gradio ์ธํ„ฐํŽ˜์ด์Šค (์ˆ˜์ •๋œ ๋ฒ„์ „)
274
  def create_interface():
275
- # Gemini ๋ชจ๋ธ ์ดˆ๊ธฐํ™”
276
- gemini_model = initialize_gemini()
277
-
278
  with gr.Blocks() as demo:
279
  gr.HTML(
280
  """
@@ -338,7 +334,7 @@ def create_interface():
338
  def process_and_show_prompt(image1, image2, image3, prompt):
339
  try:
340
  # ์ด๋ฏธ์ง€ ์ƒ์„ฑ ํ•จ์ˆ˜ ํ˜ธ์ถœ
341
- result_img, status = process_images_with_prompt(image1, image2, image3, prompt, gemini_model)
342
 
343
  # ํ”„๋กฌํ”„ํŠธ ์ „์ฒ˜๋ฆฌ
344
  processed_prompt = preprocess_prompt(prompt, image1, image2, image3)
 
18
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
19
  logger = logging.getLogger(__name__)
20
 
21
+ # LLM 설정 및 번역 함수
22
+ def get_translation(korean_text):
23
+ """
24
+ ํ•œ๊ตญ์–ด ํ…์ŠคํŠธ๋ฅผ ์˜์–ด๋กœ ๋ฒˆ์—ญ
25
+ """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
  try:
27
+ api_key = os.environ.get("GEMINI_API_KEY")
28
+ if not api_key:
29
+ logger.error("GEMINI_API_KEY๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค.")
30
+ return korean_text
31
+
32
+ # ํด๋ผ์ด์–ธํŠธ ์ดˆ๊ธฐํ™”
33
+ client = genai.GenerativeModel(
34
+ model_name="gemini-2.0-flash",
35
+ generation_config={
36
+ "temperature": 0.2,
37
+ "max_output_tokens": 1024,
38
+ "top_p": 0.9,
39
+ },
40
+ system_instruction="You are a professional translator who translates Korean to English accurately.",
41
+ api_key=api_key
42
+ )
43
+
44
+ # ๋ฒˆ์—ญ ํ”„๋กฌํ”„ํŠธ
45
  translation_prompt = f"""
46
+ Translate the following Korean text to English accurately:
 
47
 
48
+ {korean_text}
49
+
50
+ Provide only the translation, no explanations.
51
  """
52
 
53
+ # ๋ฒˆ์—ญ ์š”์ฒญ
54
+ response = client.generate_content(translation_prompt)
 
 
 
 
 
 
 
 
 
55
 
56
  # ์‘๋‹ต์—์„œ ํ…์ŠคํŠธ ์ถ”์ถœ
57
+ if hasattr(response, 'text'):
58
+ english_text = response.text.strip()
59
+ logger.info(f"๋ฒˆ์—ญ ๊ฒฐ๊ณผ: {english_text}")
60
+ return english_text
61
+ else:
62
+ logger.warning("๋ฒˆ์—ญ ์‘๋‹ต์— text ์†์„ฑ์ด ์—†์Šต๋‹ˆ๋‹ค.")
63
+ return korean_text
64
  except Exception as e:
65
  logger.exception(f"๋ฒˆ์—ญ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}")
66
+ return korean_text # ์˜ค๋ฅ˜ ๋ฐœ์ƒ ์‹œ ์›๋ณธ ํ•œ๊ตญ์–ด ํ”„๋กฌํ”„ํŠธ ๋ฐ˜ํ™˜
67
 
68
  def save_binary_file(file_name, data):
69
  with open(file_name, "wb") as f:
 
133
 
134
  return prompt
135
 
136
+ def generate_with_images(prompt, images):
137
  """
138
  ๊ณต์‹ ๋ฌธ์„œ์— ๊ธฐ๋ฐ˜ํ•œ ์˜ฌ๋ฐ”๋ฅธ API ํ˜ธ์ถœ ๋ฐฉ์‹ ๊ตฌํ˜„
139
  ์žฌ์‹œ๋„ ๋กœ์ง ์ถ”๊ฐ€
 
152
  client = genai.Client(api_key=api_key)
153
 
154
  # ํ”„๋กฌํ”„ํŠธ ๋ฒˆ์—ญ
155
+ english_prompt = get_translation(prompt)
156
  logger.info(f"์›๋ณธ ํ”„๋กฌํ”„ํŠธ: {prompt}")
157
  logger.info(f"๋ฒˆ์—ญ๋œ ํ”„๋กฌํ”„ํŠธ: {english_prompt}")
158
 
 
221
  logger.exception("์ด๋ฏธ์ง€ ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ:")
222
  return None, f"์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
223
 
224
+ def process_images_with_prompt(image1, image2, image3, prompt):
225
  """
226
  3๊ฐœ์˜ ์ด๋ฏธ์ง€์™€ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ฒ˜๋ฆฌํ•˜๋Š” ํ•จ์ˆ˜
227
  """
 
249
  prompt = preprocess_prompt(prompt, image1, image2, image3)
250
 
251
  # ์ด๋ฏธ์ง€ ์ƒ์„ฑ API ํ˜ธ์ถœ
252
+ return generate_with_images(prompt, images)
253
 
254
  except Exception as e:
255
  logger.exception("์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ:")
 
271
 
272
  # Gradio ์ธํ„ฐํŽ˜์ด์Šค (์ˆ˜์ •๋œ ๋ฒ„์ „)
273
  def create_interface():
 
 
 
274
  with gr.Blocks() as demo:
275
  gr.HTML(
276
  """
 
334
  def process_and_show_prompt(image1, image2, image3, prompt):
335
  try:
336
  # ์ด๋ฏธ์ง€ ์ƒ์„ฑ ํ•จ์ˆ˜ ํ˜ธ์ถœ
337
+ result_img, status = process_images_with_prompt(image1, image2, image3, prompt)
338
 
339
  # ํ”„๋กฌํ”„ํŠธ ์ „์ฒ˜๋ฆฌ
340
  processed_prompt = preprocess_prompt(prompt, image1, image2, image3)