sharath6900 committed on
Commit
4ac653d
·
verified ·
1 Parent(s): af41dd6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -18
app.py CHANGED
@@ -5,47 +5,61 @@ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
5
  tokenizer = AutoTokenizer.from_pretrained("suriya7/bart-finetuned-text-summarization")
6
  model = AutoModelForSeq2SeqLM.from_pretrained("suriya7/bart-finetuned-text-summarization")
7
 
8
- def generate_summary(text, prompt):
9
- # Combine text and prompt into one input
10
- combined_input = f"{prompt}: {text}"
11
  inputs = tokenizer([combined_input], max_length=1024, return_tensors='pt', truncation=True)
12
- summary_ids = model.generate(inputs['input_ids'], max_new_tokens=100, do_sample=False)
13
  summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
14
 
15
- return summary
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
 
17
  # Initialize session state for input history if it doesn't exist
18
  if 'input_history' not in st.session_state:
19
  st.session_state['input_history'] = []
20
 
21
  # Streamlit interface
22
- st.title("Text Summarization App")
23
 
24
  # User text inputs
25
- bulk_text = st.text_area("Enter the bulk text you want to summarize", height=200)
26
- prompt = st.text_input("Enter the prompt for the summary", "What are the key points?")
27
 
28
- if st.button("Generate Summary"):
29
  if bulk_text and prompt:
30
- with st.spinner("Generating summary..."):
31
- summary = generate_summary(bulk_text, prompt)
32
 
33
- # Save the input and summary to the session state history
34
- st.session_state['input_history'].append({"text": bulk_text, "prompt": prompt, "summary": summary})
35
 
36
- st.subheader("Generated Summary:")
37
- st.write(summary)
38
  else:
39
  st.warning("Please enter both the bulk text and the prompt.")
40
 
41
- # Display the history of inputs and summaries
42
  if st.session_state['input_history']:
43
  st.subheader("History")
44
  for i, entry in enumerate(st.session_state['input_history']):
45
  st.write(f"**Input {i+1} (Text):** {entry['text']}")
46
  st.write(f"**Prompt {i+1}:** {entry['prompt']}")
47
- st.write(f"**Summary {i+1}:** {entry['summary']}")
48
  st.write("---")
49
 
50
  # Instructions for using the app
51
- st.write("Enter your bulk text and a prompt for summarization, then click 'Generate Summary' to get a summarized version based on your prompt.")
 
5
# Single fine-tuned BART checkpoint used for both the tokenizer and the model.
_CHECKPOINT = "suriya7/bart-finetuned-text-summarization"
tokenizer = AutoTokenizer.from_pretrained(_CHECKPOINT)
model = AutoModelForSeq2SeqLM.from_pretrained(_CHECKPOINT)
7
 
8
def generate_user_stories(text, prompt):
    """Summarize *text* guided by *prompt* and post-process it into user stories.

    Parameters
    ----------
    text : str
        The bulk input (e.g. meeting transcripts) to extract stories from.
    prompt : str
        Instruction prepended to the text to steer the summarization model.

    Returns
    -------
    str
        Sentences from the model summary that look like user stories,
        as produced by ``format_as_user_stories``.
    """
    # Combine prompt with the text to guide the summarization.
    combined_input = f"Prompt: {prompt}\n\nText: {text}"
    # Inputs longer than the model's 1024-token window are truncated.
    inputs = tokenizer([combined_input], max_length=1024, return_tensors='pt', truncation=True)
    # Pass the attention mask alongside the ids: omitting it makes
    # transformers emit a warning and can skew generation when padding
    # tokens are present in the batch.
    summary_ids = model.generate(
        inputs['input_ids'],
        attention_mask=inputs['attention_mask'],
        max_new_tokens=150,
        do_sample=False,  # greedy decoding for deterministic output
    )
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)

    # Post-process the raw summary to keep only user-story-shaped sentences.
    user_stories = format_as_user_stories(summary)

    return user_stories
19
+
20
def format_as_user_stories(summary):
    """Filter *summary* down to sentences that read like user stories.

    Splits the summary on sentence boundaries ('. ') and keeps only the
    sentences containing both the "as a" and "i want" markers
    (case-insensitive), then rejoins them with '. '.

    Parameters
    ----------
    summary : str
        Raw model-generated summary text.

    Returns
    -------
    str
        The matching sentences joined by '. '; empty string if none match.
    """
    sentences = summary.split('. ')
    stories = [
        sentence
        for sentence in sentences
        # Simple pattern match for the canonical user-story phrasing;
        # can be extended with more rules later.
        if 'as a' in sentence.lower() and 'i want' in sentence.lower()
    ]
    return '. '.join(stories)
30
 
31
# Initialize session state for input history if it doesn't exist.
if 'input_history' not in st.session_state:
    st.session_state['input_history'] = []

# --- Streamlit interface ---
st.title("User Story Generator")

# User text inputs.
bulk_text = st.text_area("Enter the bulk text (e.g., client calls, meeting transcripts)", height=300)
prompt = st.text_input("Enter the prompt for the user stories", "Extract user stories from the following text.")

if st.button("Generate User Stories"):
    # Guard clause: both fields are required before generating.
    if not (bulk_text and prompt):
        st.warning("Please enter both the bulk text and the prompt.")
    else:
        with st.spinner("Generating user stories..."):
            user_stories = generate_user_stories(bulk_text, prompt)

        # Record this run in the session-state history.
        st.session_state['input_history'].append(
            {"text": bulk_text, "prompt": prompt, "user_stories": user_stories}
        )

        st.subheader("Generated User Stories:")
        st.write(user_stories)

# Display the history of inputs and generated user stories.
if st.session_state['input_history']:
    st.subheader("History")
    for idx, entry in enumerate(st.session_state['input_history'], start=1):
        st.write(f"**Input {idx} (Text):** {entry['text']}")
        st.write(f"**Prompt {idx}:** {entry['prompt']}")
        st.write(f"**User Stories {idx}:** {entry['user_stories']}")
        st.write("---")

# Instructions for using the app.
st.write("Enter your bulk text and a prompt for user story extraction, then click 'Generate User Stories' to get user stories from the text.")