sharath6900 committed on
Commit
861222f
·
verified ·
1 Parent(s): 4ac653d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -20
app.py CHANGED
@@ -6,26 +6,35 @@ tokenizer = AutoTokenizer.from_pretrained("suriya7/bart-finetuned-text-summariza
6
  model = AutoModelForSeq2SeqLM.from_pretrained("suriya7/bart-finetuned-text-summarization")
7
 
8
def generate_user_stories(text, prompt):
    """Summarize *text* guided by *prompt* and extract user-story sentences.

    Args:
        text: Bulk input text to summarize.
        prompt: Instruction prepended to the text to steer the summary.

    Returns:
        A ". "-joined string of sentences that look like user stories,
        or "" if summarization fails.
    """
    try:
        # Combine prompt with the text to guide the summarization
        combined_input = f"Prompt: {prompt}\n\nText: {text}"
        inputs = tokenizer([combined_input], max_length=1024,
                           return_tensors='pt', truncation=True)
        # Pass the attention mask explicitly so the model ignores padding
        # (also silences the transformers warning about a missing mask).
        summary_ids = model.generate(inputs['input_ids'],
                                     attention_mask=inputs['attention_mask'],
                                     max_new_tokens=150, do_sample=False)
        summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)

        # Post-process to format as user stories
        return format_as_user_stories(summary)
    except Exception as e:
        # UI boundary: report the failure instead of crashing the app.
        st.error(f"An error occurred: {e}")
        return ""
 
 
 
 
 
 
 
19
 
20
def format_as_user_stories(summary):
    """Filter the sentences of *summary* that match the user-story pattern.

    A sentence qualifies when it contains both "as a" and "i want"
    (case-insensitive). Qualifying sentences are re-joined with ". ".
    """
    def _is_story(sentence):
        # Case-insensitive check for the canonical user-story phrasing.
        lowered = sentence.lower()
        return 'as a' in lowered and 'i want' in lowered

    return '. '.join(part for part in summary.split('. ') if _is_story(part))
30
 
31
  # Initialize session state for input history if it doesn't exist
@@ -44,11 +53,13 @@ if st.button("Generate User Stories"):
44
  with st.spinner("Generating user stories..."):
45
  user_stories = generate_user_stories(bulk_text, prompt)
46
 
47
- # Save the input and user stories to the session state history
48
- st.session_state['input_history'].append({"text": bulk_text, "prompt": prompt, "user_stories": user_stories})
49
-
50
- st.subheader("Generated User Stories:")
51
- st.write(user_stories)
 
 
52
  else:
53
  st.warning("Please enter both the bulk text and the prompt.")
54
 
 
6
  model = AutoModelForSeq2SeqLM.from_pretrained("suriya7/bart-finetuned-text-summarization")
7
 
8
def generate_user_stories(text, prompt):
    """Summarize *text* guided by *prompt* and extract user-story sentences.

    Args:
        text: Bulk input text to summarize.
        prompt: Instruction prepended to the text to steer the summary.

    Returns:
        A ". "-joined string of sentences that look like user stories,
        or "" if summarization fails.
    """
    try:
        # Combine prompt with the text to guide the summarization
        combined_input = f"Prompt: {prompt}\n\nText: {text}"

        # Tokenize input with truncation to fit model requirements
        inputs = tokenizer([combined_input], max_length=1024,
                           return_tensors='pt', truncation=True)

        # Generate summary; pass the attention mask explicitly so the model
        # ignores padding (also silences the transformers missing-mask warning).
        summary_ids = model.generate(inputs['input_ids'],
                                     attention_mask=inputs['attention_mask'],
                                     max_new_tokens=150, do_sample=False)
        summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)

        # Post-process to format as user stories
        user_stories = format_as_user_stories(summary)
        return user_stories
    except Exception as e:
        # UI boundary: report the failure instead of crashing the app.
        st.error(f"An error occurred: {e}")
        return ""
26
 
27
def format_as_user_stories(summary):
    """Extract user-story sentences from *summary*.

    Splits on ". ", trims surrounding whitespace from each piece, and keeps
    those containing both "as a" and "i want" (case-insensitive). The kept
    sentences are re-joined with ". ".
    """
    # Trim each sentence first, then filter on the user-story phrasing.
    candidates = (piece.strip() for piece in summary.split('. '))
    return '. '.join(
        sentence
        for sentence in candidates
        if "as a" in sentence.lower() and "i want" in sentence.lower()
    )
39
 
40
  # Initialize session state for input history if it doesn't exist
 
53
  with st.spinner("Generating user stories..."):
54
  user_stories = generate_user_stories(bulk_text, prompt)
55
 
56
+ if user_stories:
57
+ # Save the input and user stories to the session state history
58
+ st.session_state['input_history'].append({"text": bulk_text, "prompt": prompt, "user_stories": user_stories})
59
+ st.subheader("Generated User Stories:")
60
+ st.write(user_stories)
61
+ else:
62
+ st.warning("No user stories were generated. Please check the input and try again.")
63
  else:
64
  st.warning("Please enter both the bulk text and the prompt.")
65