# app.py — BigCode tokenizer visualizer (Streamlit app)
# Origin: Hugging Face Space by cakiki, commit 5131ece ("Create app.py", 684 bytes).
import streamlit as st
from tokenizers.tools import EncodingVisualizer
from transformers import AutoTokenizer
# Streamlit page setup. NOTE(fix): the page icon was mojibake ('πŸ‘©β€πŸ’»' —
# the UTF-8 bytes of the emoji misdecoded as Latin-1); restored to 👩‍💻.
st.set_page_config(page_title="BigCode Tokenizer", page_icon='👩‍💻', layout="wide")


@st.cache_resource
def _load_tokenizer():
    """Load the BigCode tokenizer once per server process.

    Without caching, Streamlit re-executes the whole script on every widget
    interaction and the tokenizer would be re-fetched/re-built each time.
    """
    return AutoTokenizer.from_pretrained(
        'bigcode/tokenizer',
        subfolder="digit-punctuation-bytelevel-bpe-py-js-java-50k",
    )


tokenizer = _load_tokenizer()
# EncodingVisualizer requires the underlying fast (Rust) tokenizer;
# AutoTokenizer only exposes it via the private `_tokenizer` attribute.
visualizer = EncodingVisualizer(tokenizer=tokenizer._tokenizer, default_to_notebook=False)

text = st.text_area(label="", placeholder="Text to tokenize")
button_clicked = st.button("Tokenize")
if text or button_clicked:
    if not text:
        # Button pressed with an empty box: nothing to tokenize —
        # avoid feeding "" into the visualizer.
        st.info("Enter some text to tokenize.")
    else:
        st.write(f"The input was split into {len(tokenizer.tokenize(text))} tokens.")
        st.components.v1.html(visualizer(text), height=1500)