from transformers import GPT2Tokenizer
import gradio as gr


# GPT-2 tokenizer loaded once at module import; reused for every request.
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")


def tokenize(input_text):
    """Return a human-readable GPT-2 token count for *input_text*."""
    token_ids = tokenizer(input_text)["input_ids"]
    token_count = len(token_ids)
    return f"Number of tokens: {token_count}"

# Wire the tokenize function to a simple web UI: a 7-line textbox in,
# the token-count string out.
# FIX: the legacy ``gr.inputs`` namespace was removed in Gradio 3.x —
# components are now top-level (``gr.Textbox``), so the old call raised
# AttributeError on any modern Gradio install.
iface = gr.Interface(fn=tokenize, inputs=gr.Textbox(lines=7), outputs="text")
iface.launch()