from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr

# WikiHow-finetuned T5-small checkpoint used for abstractive summarization.
model_name = "deep-learning-analytics/wikihow-t5-small"
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
text2text_tokenizer = AutoTokenizer.from_pretrained(model_name)


def text2text(paragraph):
    """Summarize *paragraph* with the WikiHow T5 model.

    Args:
        paragraph: Free-form input text; newlines are stripped before encoding.

    Returns:
        The decoded summary string (special tokens removed).
    """
    text = paragraph.strip().replace("\n", "")
    token_text = text2text_tokenizer.encode(text, return_tensors="pt")
    # BUG FIX: kwarg was misspelled "repetition_penality". Recent transformers
    # versions raise on unknown generate() kwargs; older ones silently ignored
    # it, so the penalty was never applied.
    token_ids = model.generate(
        token_text,
        max_length=250,
        num_beams=5,
        repetition_penalty=2.5,
        early_stopping=True,
    )
    response = text2text_tokenizer.decode(token_ids[0], skip_special_tokens=True)
    return response


in_para = gr.Textbox(lines=10, label="Paragraph", placeholder="Copy paragraph")
out = gr.Textbox(lines=1, label="Summary")

demo = gr.Interface(text2text, inputs=in_para, outputs=out)

if __name__ == "__main__":
    # BUG FIX: the Interface was built but never launched, so running the
    # script previously did nothing.
    demo.launch()