Create and activate a virtual environment, then upgrade pip inside it:

python -m venv venv_name  # Create a virtual environment (if not already created)
source venv_name/bin/activate  # Activate the virtual environment (Linux/macOS)
pip install --upgrade pip  # Upgrade pip inside the activated environment
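
Next, install the packages the app below imports. The exact package list is an assumption inferred from that code: streamlit for the UI, transformers for the BART model, and torch, which the Hugging Face model uses as its tensor backend (return_tensors="pt").

pip install streamlit transformers torch  # assumed dependency set for the app below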


import streamlit as st
from transformers import BartForConditionalGeneration, BartTokenizer

# Load the pre-trained BART model and tokenizer
model_name = "facebook/bart-large-cnn"
model = BartForConditionalGeneration.from_pretrained(model_name)
tokenizer = BartTokenizer.from_pretrained(model_name)

st.title("Text Summarization App")

# Input text area for user input
input_text = st.text_area("Enter text to summarize:")

if st.button("Summarize"):
    if input_text:
        # Tokenize the input (truncated to BART's 1024-token limit) and summarize with beam search
        inputs = tokenizer(input_text, return_tensors="pt", max_length=1024, truncation=True)
        summary_ids = model.generate(inputs["input_ids"], max_length=150, min_length=40, length_penalty=2.0, num_beams=4, early_stopping=True)
        summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
        st.subheader("Summary:")
        st.write(summary)
    else:
        st.warning("Please enter some text to summarize.")

st.write("Powered by Hugging Face's BART model.")