app.py
from transformers import PegasusForConditionalGeneration, PegasusTokenizer
import gradio as gr

# Fine-tuned PEGASUS checkpoint for dialogue summarization (SAMSum)
model_name = "ailm/pegasus-samsum-model"
model = PegasusForConditionalGeneration.from_pretrained(model_name)
# The tokenizer files sit in the repo's tokenizer/ subfolder, so load them
# via subfolder= rather than appending "/tokenizer" to the repo id
tokenizer = PegasusTokenizer.from_pretrained(model_name, subfolder="tokenizer")

def summarize(text):
    # Tokenize the input dialogue, generate a summary, and decode it back to text
    tokens = tokenizer(text, truncation=True, padding="longest", return_tensors="pt")
    summary = model.generate(**tokens)
    return tokenizer.decode(summary[0], skip_special_tokens=True)

# Minimal Gradio UI: plain-text dialogue in, summary text out
iface = gr.Interface(fn=summarize, inputs="text", outputs="text")
iface.launch()
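For a quick check outside the Gradio UI, summarize can be called directly; since iface.launch() blocks, run a snippet like the one below (the dialogue is an invented sample, not taken from the SAMSum data) before launching or in a separate session:

sample_dialogue = (
    "Anna: Are we still on for lunch tomorrow?\n"
    "Ben: Yes, 12:30 at the usual place.\n"
    "Anna: Great, see you then!"
)
print(summarize(sample_dialogue))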