import gradio as gr
from aranizer import (
    aranizer_bpe50k, aranizer_bpe64k, aranizer_bpe86k,
    aranizer_sp32k, aranizer_sp50k, aranizer_sp64k, aranizer_sp86k,
)
# Load all available AraNizer tokenizers, following the library's documented usage
tokenizers = {
    "aranizer_bpe50k": aranizer_bpe50k.get_tokenizer(),
    "aranizer_bpe64k": aranizer_bpe64k.get_tokenizer(),
    "aranizer_bpe86k": aranizer_bpe86k.get_tokenizer(),
    "aranizer_sp32k": aranizer_sp32k.get_tokenizer(),
    "aranizer_sp50k": aranizer_sp50k.get_tokenizer(),
    "aranizer_sp64k": aranizer_sp64k.get_tokenizer(),
    "aranizer_sp86k": aranizer_sp86k.get_tokenizer(),
}
def compare_tokenizers(text):
    results = []
    for name, tokenizer in tokenizers.items():
        tokens = tokenizer.tokenize(text)
        encoded_output = tokenizer.encode(text, add_special_tokens=True)
        decoded_text = tokenizer.decode(encoded_output)
        # Collect each tokenizer's results as one table row
        results.append((name, tokens, encoded_output, decoded_text))
    return results
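# Illustrative only: the return value is a list of rows, one per entry in the
# `tokenizers` dict above, each shaped as
#   (tokenizer_name, tokens, encoded_output, decoded_text)
# so a call like compare_tokenizers("مرحبا بالعالم") yields seven such rows.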
# Gradio input and output components
inputs_component = gr.Textbox(lines=2, placeholder="Enter Arabic text here...", label="Input Text")
# A Dataframe shows one row per tokenizer, with headers matching the tuple order returned by compare_tokenizers
outputs_component = gr.Dataframe(label="Results", headers=["Tokenizer", "Tokens", "Encoded Output", "Decoded Text"])
# Setting up the interface
iface = gr.Interface(fn=compare_tokenizers, inputs=inputs_component, outputs=outputs_component, title="AraNizer Tokenizer Comparison")
# Launching the Gradio app
iface.launch()