# NOTE(review): the lines below were Hugging Face Space page residue
# ("Spaces:" / "Sleeping" status labels), not code — kept as a comment.
import gradio as gr
import torch
from transformers import T5ForConditionalGeneration, AutoTokenizer, pipeline

# Persian paraphraser mT5 checkpoint; loaded once at module import time.
MODEL_NAME = 'SeyedAli/Persian-Text-paraphraser-mT5-V1'

tokenizer = AutoTokenizer.from_pretrained(
    MODEL_NAME, model_max_length=100, add_special_tokens=True
)
model = T5ForConditionalGeneration.from_pretrained(MODEL_NAME)
# Build the generation pipeline once at import time instead of on every
# request — pipeline construction is expensive and the model never changes.
_paraphraser = pipeline(task='text2text-generation', model=model, tokenizer=tokenizer)

def NER(text):
    """Paraphrase a Persian sentence and return the generated text.

    Parameters
    ----------
    text : str
        Input Persian text to paraphrase.

    Returns
    -------
    str
        The paraphrased sentence (first pipeline candidate). The pipeline
        returns ``[{'generated_text': ...}]``; we unwrap it so the Gradio
        ``outputs="text"`` component receives a plain string rather than
        the repr of a list of dicts.
    """
    # Guard the empty/None input Gradio can send before the user types.
    if not text:
        return ""
    output = _paraphraser(text)
    return output[0]['generated_text']
# Simple text-in/text-out demo UI for the paraphraser.
iface = gr.Interface(fn=NER, inputs="text", outputs="text")
iface.launch(share=False)