# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_backends
class AlbertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class BartTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class BarthezTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class BertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class BigBirdTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class CamembertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class CLIPTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class ConvBertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class DebertaTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class DistilBertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class DPRContextEncoderTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class DPRQuestionEncoderTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class DPRReaderTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class ElectraTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class FunnelTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class GPT2TokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class HerbertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class LayoutLMTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class LEDTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class LongformerTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class LxmertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class MBart50TokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class MBartTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class MobileBertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class MPNetTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class MT5TokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class OpenAIGPTTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class PegasusTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class ReformerTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class RetriBertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class RobertaTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class RoFormerTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class SqueezeBertTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class T5TokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class XLMRobertaTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class XLNetTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])
class PreTrainedTokenizerFast:
    """Dummy placeholder; every entry point runs the `tokenizers` backend check."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

    # classmethod so the backend check runs without needing an instance
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["tokenizers"])