import gradio as gr
from transformers import BartForConditionalGeneration, BartTokenizer

# Load the SAMSum-finetuned BART summarization model and its tokenizer
model_name = "philschmid/bart-large-cnn-samsum"
tokenizer = BartTokenizer.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)
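# Note: the first run downloads the model weights from the Hugging Face Hub; subsequent runs use the local cache.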

# (min_length, max_length) token ranges for each summary length option
length_options = {
    "Short": (30, 50),
    "Medium": (50, 100),
    "Long": (100, 150)
}

# Summarize the input text, constrained to the selected length range
def summarize_text(text, summary_length):
    min_len, max_len = length_options.get(summary_length, (50, 100))  # Default to Medium

    inputs = tokenizer(text, return_tensors="pt", max_length=1024, truncation=True)
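    # Beam search (6 beams) with a mild repetition penalty for fluent, non-repetitive summaries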
    summary_ids = model.generate(
        inputs["input_ids"], 
        max_length=max_len, 
        min_length=min_len, 
        length_penalty=1.0, 
        num_beams=6, 
        repetition_penalty=1.2
    )

    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)

# Gradio Interface
with gr.Blocks() as demo:
    gr.Markdown("# AI-Powered Summarizer ✨")
    
    with gr.Row():
        text_input = gr.Textbox(label="Enter Text to Summarize", lines=8, placeholder="Paste your text here...")
    
    summary_length = gr.Radio(["Short", "Medium", "Long"], value="Medium", label="Summary Length")

    summarize_button = gr.Button("Summarize")

    output_text = gr.Textbox(label="Summary", lines=6)

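    # Wire the button: send the input text and chosen length to summarize_text, display the result in the output box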
    summarize_button.click(summarize_text, inputs=[text_input, summary_length], outputs=output_text)

# Launch Gradio App
demo.launch()