# Bart-summarizer / app.py
import gradio as gr
from transformers import BartForConditionalGeneration, BartTokenizer
# Load the BART model fine-tuned on the SAMSum dialogue-summarization dataset
model_name = "philschmid/bart-large-cnn-samsum"
tokenizer = BartTokenizer.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)
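# Optional: run inference on a GPU when one is available. This is only a
# sketch, assuming torch is importable (it ships as a transformers dependency);
# the app works on CPU without it.
# import torch
# device = "cuda" if torch.cuda.is_available() else "cpu"
# model = model.to(device)
# (inputs["input_ids"] would then also need .to(device) inside summarize_text)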
# Summary length presets mapped to (min_length, max_length) in tokens
length_options = {
    "Short": (30, 50),
    "Medium": (50, 100),
    "Long": (100, 150),
}
# Summarize the input text using the selected length preset
def summarize_text(text, summary_length):
    min_len, max_len = length_options.get(summary_length, (50, 100))  # default to "Medium"
    # Tokenize, truncating long inputs to BART's 1024-token context window
    inputs = tokenizer(text, return_tensors="pt", max_length=1024, truncation=True)
    summary_ids = model.generate(
        inputs["input_ids"],
        max_length=max_len,
        min_length=min_len,
        length_penalty=1.0,
        num_beams=6,
        repetition_penalty=1.2,
    )
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
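# Quick sanity check with a hypothetical two-line dialogue (not part of the
# original app; shown only to illustrate the expected call):
#   summarize_text("Anna: Lunch at noon?\nBen: Sure, see you there.", "Short")
# should return a one- or two-sentence summary of the exchange.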
# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# AI-Powered Summarizer ✨")
    with gr.Row():
        text_input = gr.Textbox(label="Enter Text to Summarize", lines=8, placeholder="Paste your text here...")
        summary_length = gr.Radio(["Short", "Medium", "Long"], value="Medium", label="Summary Length")
    summarize_button = gr.Button("Summarize")
    output_text = gr.Textbox(label="Summary", lines=6)
    # Wire the button: (text, length preset) -> summarize_text -> summary box
    summarize_button.click(summarize_text, inputs=[text_input, summary_length], outputs=output_text)
# Launch Gradio App
demo.launch()
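# Note: on a hosted Space the bare launch() above is all that is needed. If a
# temporary public URL were wanted when running locally (an assumption, not
# something the original app configures), demo.launch(share=True) would
# provide one.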