File size: 2,889 Bytes
c71b2e8
8fff906
b17a76b
8844977
8fff906
 
b17a76b
4ac653d
 
 
af41dd6
4ac653d
8fff906
8844977
4ac653d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8fff906
8844977
 
 
7f33eef
8844977
4ac653d
c71b2e8
af41dd6
4ac653d
 
8844977
4ac653d
af41dd6
4ac653d
 
8844977
4ac653d
 
8844977
4ac653d
 
c71b2e8
af41dd6
8844977
4ac653d
8844977
 
 
af41dd6
 
4ac653d
8844977
 
 
4ac653d
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
import streamlit as st
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the tokenizer and model once and cache them across reruns.
# Streamlit re-executes the whole script on every widget interaction;
# without caching, the BART checkpoint would be reloaded from disk each
# time the user clicks a button. @st.cache_resource keeps a single
# shared instance alive for the session.
@st.cache_resource
def _load_summarizer():
    """Return the (tokenizer, model) pair for the summarization checkpoint."""
    checkpoint = "suriya7/bart-finetuned-text-summarization"
    return (
        AutoTokenizer.from_pretrained(checkpoint),
        AutoModelForSeq2SeqLM.from_pretrained(checkpoint),
    )

tokenizer, model = _load_summarizer()

def generate_user_stories(text, prompt):
    """Summarize *text* guided by *prompt* and return user-story sentences.

    Parameters
    ----------
    text : str
        Bulk source text (e.g. meeting transcripts) to extract stories from.
    prompt : str
        Instruction prepended to the text to steer the summarizer.

    Returns
    -------
    str
        The summary filtered down to user-story-like sentences by
        ``format_as_user_stories``.
    """
    # Combine prompt with the text to guide the summarization.
    combined_input = f"Prompt: {prompt}\n\nText: {text}"
    inputs = tokenizer([combined_input], max_length=1024, return_tensors='pt', truncation=True)
    # Pass the attention mask explicitly: calling generate() with only
    # input_ids triggers a transformers warning and can produce degraded
    # output if the tokenizer ever pads the batch.
    summary_ids = model.generate(
        inputs['input_ids'],
        attention_mask=inputs['attention_mask'],
        max_new_tokens=150,
        do_sample=False,
    )
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)

    # Post-process the free-form summary into user-story lines.
    return format_as_user_stories(summary)

def format_as_user_stories(summary):
    """Keep only the sentences of *summary* that look like user stories.

    A sentence qualifies when it contains both the phrases "as a" and
    "i want" (case-insensitive) — the canonical user-story template.
    The qualifying sentences are re-joined with ". ".
    """
    def _is_user_story(sentence):
        # Case-insensitive check for the two user-story marker phrases.
        lowered = sentence.lower()
        return 'as a' in lowered and 'i want' in lowered

    sentences = summary.split('. ')
    return '. '.join(s for s in sentences if _is_user_story(s))

# Make sure the per-session history store exists before any widget runs.
if 'input_history' not in st.session_state:
    st.session_state['input_history'] = []

# ---- Page layout -----------------------------------------------------------
st.title("User Story Generator")

# Inputs: the raw material and the steering prompt.
source_text = st.text_area("Enter the bulk text (e.g., client calls, meeting transcripts)", height=300)
story_prompt = st.text_input("Enter the prompt for the user stories", "Extract user stories from the following text.")

if st.button("Generate User Stories"):
    if source_text and story_prompt:
        with st.spinner("Generating user stories..."):
            stories = generate_user_stories(source_text, story_prompt)

        # Record this run so it shows up in the history section below.
        st.session_state['input_history'].append({"text": source_text, "prompt": story_prompt, "user_stories": stories})

        st.subheader("Generated User Stories:")
        st.write(stories)
    else:
        # Both fields are required; nudge the user instead of running empty.
        st.warning("Please enter both the bulk text and the prompt.")

# ---- History ---------------------------------------------------------------
# Replay every previous run stored in session state, numbered from 1.
if st.session_state['input_history']:
    st.subheader("History")
    for idx, record in enumerate(st.session_state['input_history'], start=1):
        st.write(f"**Input {idx} (Text):** {record['text']}")
        st.write(f"**Prompt {idx}:** {record['prompt']}")
        st.write(f"**User Stories {idx}:** {record['user_stories']}")
        st.write("---")

# Brief usage hint shown at the bottom of the page.
st.write("Enter your bulk text and a prompt for user story extraction, then click 'Generate User Stories' to get user stories from the text.")