Vardhan-kuppala committed
Commit 4448981 · verified · 1 Parent(s): 0f7372d

Update app.py

Files changed (1): app.py +19 -2
app.py CHANGED
@@ -18,14 +18,24 @@ def extract_text_from_block(pdf_path, block_pages):
     return text
 
 def generate_test_cases(text, prompt_template):
-    prompt = prompt_template.format(text=text)
+    # Define a termination phrase that signals completion
+    termination_phrase = "### END OF TEST CASES ###"
+
+    # Append termination instruction to the prompt template
+    full_prompt_template = prompt_template + (
+        "\n\nPlease ensure that your response ends with '"
+        + termination_phrase
+        + "' to indicate that no further test cases are needed."
+    )
+
+    prompt = full_prompt_template.format(text=text)
     messages = [
         {"role": "system", "content": "You are a helpful assistant that generates test cases based on given text."},
         {"role": "user", "content": prompt}
     ]
 
     full_content = ""
-    max_iterations = 10
+    max_iterations = 10
     iteration = 0
 
     while iteration < max_iterations:
@@ -46,6 +56,12 @@ def generate_test_cases(text, prompt_template):
         part = response.choices[0].message['content'].strip()
         full_content += "\n" + part
 
+        # Check if the termination phrase is in the output
+        if termination_phrase in part:
+            # Optionally remove the termination phrase before returning
+            full_content = full_content.replace(termination_phrase, "")
+            break
+
         finish_reason = response.choices[0].get("finish_reason", None)
         # If the response appears truncated, ask the model to continue
         if finish_reason == "length" or part.endswith("..."):
@@ -61,6 +77,7 @@ def generate_test_cases(text, prompt_template):
 
 
 
+
 def process_pdf(pdf_file, selected_block, prompt_template):
     block_config = {
         "5.4.1 OSPI-xSPI-QSPI-SPI Boot": (482, 488),