openfree commited on
Commit
065b521
·
verified ·
1 Parent(s): f2f0666

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -33
app.py CHANGED
@@ -22,7 +22,7 @@ import os
22
  import gc
23
  from openai import OpenAI
24
  import re
25
-
26
  # Load system prompts
27
  system_prompt_t2v = """๋‹น์‹ ์€ ๋น„๋””์˜ค ์ƒ์„ฑ์„ ์œ„ํ•œ ํ”„๋กฌํ”„ํŠธ ์ „๋ฌธ๊ฐ€์ž…๋‹ˆ๋‹ค.
28
  ์ฃผ์–ด์ง„ ํ”„๋กฌํ”„ํŠธ๋ฅผ ๋‹ค์Œ ๊ตฌ์กฐ์— ๋งž๊ฒŒ ๊ฐœ์„ ํ•ด์ฃผ์„ธ์š”:
@@ -551,42 +551,56 @@ system_prompt_scenario = """๋‹น์‹ ์€ ์˜์ƒ ์Šคํฌ๋ฆฝํŠธ์— ๋งž๋Š” ๋ฐฐ๊ฒฝ ์˜
551
 
552
 
553
def analyze_scenario(scenario):
    """Build five background-video prompts for a script via one chat completion.

    Sends the whole scenario in a single request, then splits the model's
    answer into numbered sections. Always returns a list of exactly five
    strings; on any API/parsing failure a list of five error placeholders
    is returned instead of raising.
    """
    request_messages = [
        {"role": "system", "content": system_prompt_scenario},
        {"role": "user", "content": f"""
๋‹ค์Œ ์Šคํฌ๋ฆฝํŠธ์˜ ๋ถ„์œ„๊ธฐ์™€ ๊ฐ์„ฑ์„ ํ‘œํ˜„ํ•  ์ˆ˜ ์žˆ๋Š” ๋ฐฐ๊ฒฝ ์˜์ƒ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ƒ์„ฑํ•ด์ฃผ์„ธ์š”:

{scenario}

๊ฐ ์„น์…˜๋ณ„๋กœ ์ง์ ‘์ ์ธ ์ œํ’ˆ ๋ฌ˜์‚ฌ๋Š” ํ”ผํ•˜๊ณ , ์Šคํฌ๋ฆฝํŠธ์˜ ๊ฐ์„ฑ์„ ํ‘œํ˜„ํ•˜๋Š” ๋ฐฐ๊ฒฝ ์˜์ƒ์— ์ง‘์ค‘ํ•ด์ฃผ์„ธ์š”."""},
    ]

    try:
        completion = client.chat.completions.create(
            model="gpt-4-1106-preview",
            messages=request_messages,
            max_tokens=2000,
        )
        paragraphs = completion.choices[0].message.content.strip().split("\n\n")

        # Group the answer into numbered sections: a paragraph containing a
        # "1."–"5." marker starts a new section; anything else is appended
        # to the section currently being collected.
        sections = []
        pending = ""
        markers = ["1.", "2.", "3.", "4.", "5."]
        for paragraph in paragraphs:
            if not paragraph.strip():
                continue
            if any(marker in paragraph for marker in markers):
                if pending:
                    sections.append(pending)
                pending = paragraph
            else:
                pending += "\n" + paragraph
        if pending:
            sections.append(pending)

        # Callers expect exactly five entries: pad short results, trim long ones.
        while len(sections) < 5:
            sections.append("์ถ”๊ฐ€ ์„น์…˜์ด ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค.")
        return sections[:5]

    except Exception as e:
        # Boundary handler: log and hand back five placeholders so the UI
        # downstream never sees a partial list.
        print(f"Error during scenario analysis: {e}")
        return ["Error occurred during analysis"] * 5
 
22
  import gc
23
  from openai import OpenAI
24
  import re
25
+ import time
26
  # Load system prompts
27
  system_prompt_t2v = """๋‹น์‹ ์€ ๋น„๋””์˜ค ์ƒ์„ฑ์„ ์œ„ํ•œ ํ”„๋กฌํ”„ํŠธ ์ „๋ฌธ๊ฐ€์ž…๋‹ˆ๋‹ค.
28
  ์ฃผ์–ด์ง„ ํ”„๋กฌํ”„ํŠธ๋ฅผ ๋‹ค์Œ ๊ตฌ์กฐ์— ๋งž๊ฒŒ ๊ฐœ์„ ํ•ด์ฃผ์„ธ์š”:
 
551
 
552
 
553
def analyze_scenario(scenario):
    """Generate one background-video prompt per script section (five in total).

    Issues one chat-completion request per section (1–5), each steered by a
    section-specific tone description, and returns a list of five strings of
    the form "<n>. <prompt>". On any failure a list of five error
    placeholders is returned instead of raising.
    """
    # Loop-invariant section tone descriptions — built once, not rebuilt on
    # every iteration as before.
    section_descriptions = {
        1: "๋ฐฐ๊ฒฝ ๋ฐ ํ•„์š”์„ฑ: ์ฃผ์ œ์˜ ์ „๋ฐ˜์ ์ธ ๋ถ„์œ„๊ธฐ๋ฅผ ํ‘œํ˜„ํ•˜๋Š” ๋ฐฐ๊ฒฝ ์”ฌ",
        2: "ํฅ๋ฏธ ์œ ๋ฐœ: ๊ธด์žฅ๊ฐ์ด๋‚˜ ๊ฐˆ๋“ฑ์„ ์•”์‹œํ•˜๋Š” ๋ถ„์œ„๊ธฐ ์žˆ๋Š” ๋ฐฐ๊ฒฝ",
        3: "ํ•ด๊ฒฐ์ฑ… ์ œ์‹œ: ํฌ๋ง์ ์ด๊ณ  ๋ฐ์€ ํ†ค์˜ ๋ฐฐ๊ฒฝ ์ „ํ™˜",
        4: "๋ณธ๋ก : ์•ˆ์ •๊ฐ ์žˆ๊ณ  ์‹ ๋ขฐ๋„๋ฅผ ๋†’์ด๋Š” ๋ฐฐ๊ฒฝ",
        5: "๊ฒฐ๋ก : ์ž„ํŒฉํŠธ ์žˆ๋Š” ๋งˆ๋ฌด๋ฆฌ๋ฅผ ์œ„ํ•œ ์—ญ๋™์ ์ธ ๋ฐฐ๊ฒฝ"
    }

    try:
        section_prompts = []

        for section_num in range(1, 6):
            messages = [
                {"role": "system", "content": system_prompt_scenario},
                {"role": "user", "content": f"""
๋‹ค์Œ ์Šคํฌ๋ฆฝํŠธ์˜ {section_num}๋ฒˆ์งธ ์„น์…˜({section_descriptions[section_num]})์— ๋Œ€ํ•œ
๋ฐฐ๊ฒฝ ์˜์ƒ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ƒ์„ฑํ•ด์ฃผ์„ธ์š”.

์Šคํฌ๋ฆฝํŠธ:
{scenario}

์ฃผ์˜์‚ฌํ•ญ:
1. ํ•ด๋‹น ์„น์…˜์˜ ํŠน์„ฑ({section_descriptions[section_num]})์— ๋งž๋Š” ๋ถ„์œ„๊ธฐ์™€ ํ†ค์„ ๋ฐ˜์˜ํ•˜์„ธ์š”.
2. ์ง์ ‘์ ์ธ ์ œํ’ˆ/์„œ๋น„์Šค ๋ฌ˜์‚ฌ๋Š” ํ”ผํ•˜๊ณ , ๊ฐ์„ฑ์ ์ด๊ณ  ์€์œ ์ ์ธ ๋ฐฐ๊ฒฝ ์˜์ƒ์— ์ง‘์ค‘ํ•˜์„ธ์š”.
3. ๋‹ค์Œ ๊ตฌ์กฐ๋ฅผ ๋ฐ˜๋“œ์‹œ ํฌํ•จํ•˜์„ธ์š”:
   - ์ฃผ์š” ๋™์ž‘์„ ๋ช…ํ™•ํ•œ ํ•œ ๋ฌธ์žฅ์œผ๋กœ ์‹œ์ž‘
   - ๊ตฌ์ฒด์ ์ธ ๋™์ž‘๊ณผ ์ œ์Šค์ฒ˜๋ฅผ ์‹œ๊ฐ„ ์ˆœ์„œ๋Œ€๋กœ ์„ค๋ช…
   - ๋ฐฐ๊ฒฝ๊ณผ ํ™˜๊ฒฝ ์„ธ๋ถ€ ์‚ฌํ•ญ์„ ๊ตฌ์ฒด์ ์œผ๋กœ ํฌํ•จ
   - ์นด๋ฉ”๋ผ ๊ฐ๋„์™€ ์›€์ง์ž„์„ ๋ช…์‹œ
   - ์กฐ๋ช…๊ณผ ์ƒ‰์ƒ์„ ์ž์„ธํžˆ ์„ค๋ช…
   - ๋ณ€ํ™”๋‚˜ ๊ฐ‘์ž‘์Šค๋Ÿฌ์šด ์‚ฌ๊ฑด์„ ์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ ํฌํ•จ"""}
            ]

            response = client.chat.completions.create(
                model="gpt-4-1106-preview",
                messages=messages,
                max_tokens=500,
                temperature=0.7
            )

            section_prompt = response.choices[0].message.content.strip()
            section_prompts.append(f"{section_num}. {section_prompt}")

            # Short pause between API requests to avoid rate limits; no point
            # sleeping after the final request, so skip it there.
            if section_num < 5:
                time.sleep(1)

        return section_prompts

    except Exception as e:
        # Boundary handler: callers expect exactly five entries even on failure.
        print(f"Error during scenario analysis: {e}")
        return ["Error occurred during analysis"] * 5