import gradio as gr
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

# Load the fine-tuned isiNdebele-to-English M2M100 model and tokenizer from the Hugging Face Hub
model_name = "dsfsi/nr-en-m2m100-gov"
tokenizer = M2M100Tokenizer.from_pretrained(model_name)
model = M2M100ForConditionalGeneration.from_pretrained(model_name)

# Set isiNdebele as the source language and force English as the generation target
tokenizer.src_lang = "nr"
model.config.forced_bos_token_id = tokenizer.get_lang_id("en")


def translate(inp):
    # Tokenize the input, generate the English translation, and decode back to plain text
    inputs = tokenizer(inp, return_tensors="pt")
    translated_tokens = model.generate(**inputs, max_length=512, forced_bos_token_id=tokenizer.get_lang_id("en"))
    translated_text = tokenizer.decode(translated_tokens[0], skip_special_tokens=True)
    return translated_text
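
# HTML snippets shown above (description) and below (article) the translation widget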
description = """
<p>
<center>
isiNdebele to English Translation
</center>
</p>
"""
article = "<p style='text-align: center'><a href='https://huggingface.co/dsfsi/nr-en-m2m100-gov' target='_blank'>by dsfsi</a></p>"
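
# Example isiNdebele inputs offered as one-click examples in the UI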
examples = [
["Ngiyabonga kakhulu ngesipho osinike sona."],
["Ukuthula kuhlale kuyindlela ephilayo yempilo yethu."]
]

# Build the Gradio interface: a 5-line textbox for isiNdebele in, translated English text out
iface = gr.Interface(
    fn=translate,
    title="isiNdebele to English Translation",
    description=description,
    article=article,
    examples=examples,
    inputs=gr.components.Textbox(lines=5, placeholder="Enter isiNdebele text (maximum 5 lines)", label="Input"),
    outputs="text"
)

iface.launch(enable_queue=True)