# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_backends
# Benchmark dummies: raise an informative error via `requires_backends`
# when instantiated without PyTorch installed.
class PyTorchBenchmark:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class PyTorchBenchmarkArguments:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])
# Data-collator dummies. `from_pretrained` is a @classmethod, matching the
# real API surface these placeholders stand in for.
class DataCollator:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DataCollatorForLanguageModeling:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DataCollatorForPermutationLanguageModeling:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DataCollatorForSeq2Seq:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DataCollatorForSOP:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DataCollatorForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DataCollatorForWholeWordMask:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DataCollatorWithPadding:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


def default_data_collator(*args, **kwargs):
    requires_backends(default_data_collator, ["torch"])
# Dataset dummies for the legacy GLUE / SQuAD / text-file dataset helpers.
class GlueDataset:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class GlueDataTrainingArguments:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LineByLineTextDataset:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LineByLineWithRefDataset:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LineByLineWithSOPTextDataset:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class SquadDataset:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class SquadDataTrainingArguments:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class TextDataset:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class TextDatasetForNextSentencePrediction:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])
# Generation-utility dummies: beam scorers, logits processors/warpers and
# stopping criteria.
class BeamScorer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class BeamSearchScorer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class ForcedBOSTokenLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ForcedEOSTokenLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class HammingDiversityLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class InfNanRemoveLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LogitsProcessorList:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LogitsWarper:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MinLengthLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class NoBadWordsLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class NoRepeatNGramLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class PrefixConstrainedLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RepetitionPenaltyLogitsProcessor:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class TemperatureLogitsWarper:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class TopKLogitsWarper:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class TopPLogitsWarper:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MaxLengthCriteria:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MaxTimeCriteria:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class StoppingCriteria:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class StoppingCriteriaList:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


def top_k_top_p_filtering(*args, **kwargs):
    requires_backends(top_k_top_p_filtering, ["torch"])
# Core modeling-utility dummies.
class Conv1D:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class PreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def apply_chunking_to_forward(*args, **kwargs):
    requires_backends(apply_chunking_to_forward, ["torch"])


def prune_layer(*args, **kwargs):
    requires_backends(prune_layer, ["torch"])
# ALBERT dummies.
ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class AlbertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AlbertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AlbertForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class AlbertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AlbertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AlbertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AlbertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AlbertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_albert(*args, **kwargs):
    requires_backends(load_tf_weights_in_albert, ["torch"])
# Auto-class dummies and their model-mapping placeholders.
MODEL_FOR_CAUSAL_LM_MAPPING = None

MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING = None

MODEL_FOR_MASKED_LM_MAPPING = None

MODEL_FOR_MULTIPLE_CHOICE_MAPPING = None

MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING = None

MODEL_FOR_OBJECT_DETECTION_MAPPING = None

MODEL_FOR_PRETRAINING_MAPPING = None

MODEL_FOR_QUESTION_ANSWERING_MAPPING = None

MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING = None

MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING = None

MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING = None

MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING = None

MODEL_MAPPING = None

MODEL_WITH_LM_HEAD_MAPPING = None


class AutoModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForImageClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForNextSentencePrediction:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForSeq2SeqLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForTableQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class AutoModelWithLMHead:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# BART dummies.
BART_PRETRAINED_MODEL_ARCHIVE_LIST = None


class BartForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BartForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BartForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BartForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BartModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BartPretrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class PretrainedBartModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# BERT dummies.
BERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class BertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BertForNextSentencePrediction:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class BertForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class BertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BertLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class BertLMHeadModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_bert(*args, **kwargs):
    requires_backends(load_tf_weights_in_bert, ["torch"])
# BertGeneration dummies.
class BertGenerationDecoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class BertGenerationEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class BertGenerationPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_bert_generation(*args, **kwargs):
    requires_backends(load_tf_weights_in_bert_generation, ["torch"])
# BigBird dummies.
BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST = None


class BigBirdForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class BigBirdForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class BigBirdModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_big_bird(*args, **kwargs):
    requires_backends(load_tf_weights_in_big_bird, ["torch"])
# BigBirdPegasus dummies.
BIGBIRD_PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST = None


class BigBirdPegasusForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdPegasusForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdPegasusForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdPegasusForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdPegasusModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BigBirdPegasusPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# Blenderbot and BlenderbotSmall dummies.
BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class BlenderbotForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BlenderbotForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BlenderbotModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BlenderbotPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST = None


class BlenderbotSmallForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BlenderbotSmallForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BlenderbotSmallModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class BlenderbotSmallPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# CamemBERT dummies.
CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class CamembertForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CamembertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CamembertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CamembertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CamembertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CamembertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CamembertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# CANINE dummies.
CANINE_PRETRAINED_MODEL_ARCHIVE_LIST = None


class CanineForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CanineForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CanineForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CanineForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CanineLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class CanineModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CaninePreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_canine(*args, **kwargs):
    requires_backends(load_tf_weights_in_canine, ["torch"])
# CLIP dummies.
CLIP_PRETRAINED_MODEL_ARCHIVE_LIST = None


class CLIPModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CLIPPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CLIPTextModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CLIPVisionModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# ConvBERT dummies.
CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class ConvBertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ConvBertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ConvBertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ConvBertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ConvBertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ConvBertLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class ConvBertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ConvBertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_convbert(*args, **kwargs):
    requires_backends(load_tf_weights_in_convbert, ["torch"])
# CTRL dummies.
CTRL_PRETRAINED_MODEL_ARCHIVE_LIST = None


class CTRLForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CTRLLMHeadModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CTRLModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class CTRLPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# DeBERTa and DeBERTa-v2 dummies.
DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None


class DebertaForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


DEBERTA_V2_PRETRAINED_MODEL_ARCHIVE_LIST = None


class DebertaV2ForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaV2ForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaV2ForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaV2ForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaV2Model:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DebertaV2PreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# DeiT dummies.
DEIT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class DeiTForImageClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DeiTForImageClassificationWithTeacher:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DeiTModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DeiTPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# DistilBERT dummies.
DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class DistilBertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DistilBertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DistilBertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DistilBertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DistilBertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DistilBertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class DistilBertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# DPR dummies.
DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST = None

DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST = None

DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST = None


class DPRContextEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DPRPretrainedContextEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DPRPretrainedQuestionEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DPRPretrainedReader:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DPRQuestionEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class DPRReader:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])
# Electra dummies: any use raises via `requires_backends` (PyTorch needed).
# `from_pretrained` is a @classmethod so class-level calls report the class.
ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST = None


class ElectraForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ElectraForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ElectraForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class ElectraForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ElectraForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ElectraForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ElectraModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ElectraPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_electra(*args, **kwargs):
    requires_backends(load_tf_weights_in_electra, ["torch"])
class EncoderDecoderModel:
    """Dummy EncoderDecoderModel: any use raises via `requires_backends` (needs torch)."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    # @classmethod so a class-level call reports the class, not the checkpoint string.
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# Flaubert dummies: any use raises via `requires_backends` (PyTorch needed).
FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class FlaubertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FlaubertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FlaubertForQuestionAnsweringSimple:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FlaubertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FlaubertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FlaubertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FlaubertWithLMHeadModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# FSMT dummies: any use raises via `requires_backends` (PyTorch needed).
class FSMTForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FSMTModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class PretrainedFSMTModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# Funnel dummies: any use raises via `requires_backends` (PyTorch needed).
FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST = None


class FunnelBaseModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FunnelForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FunnelForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FunnelForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class FunnelForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FunnelForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FunnelForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FunnelModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class FunnelPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_funnel(*args, **kwargs):
    requires_backends(load_tf_weights_in_funnel, ["torch"])
# GPT-2 dummies: any use raises via `requires_backends` (PyTorch needed).
GPT2_PRETRAINED_MODEL_ARCHIVE_LIST = None


class GPT2DoubleHeadsModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class GPT2ForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class GPT2LMHeadModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class GPT2Model:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class GPT2PreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_gpt2(*args, **kwargs):
    requires_backends(load_tf_weights_in_gpt2, ["torch"])
# GPT-Neo dummies: any use raises via `requires_backends` (PyTorch needed).
GPT_NEO_PRETRAINED_MODEL_ARCHIVE_LIST = None


class GPTNeoForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class GPTNeoForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class GPTNeoModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class GPTNeoPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_gpt_neo(*args, **kwargs):
    requires_backends(load_tf_weights_in_gpt_neo, ["torch"])
# Hubert dummies: any use raises via `requires_backends` (PyTorch needed).
HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class HubertForCTC:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class HubertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class HubertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# I-BERT dummies: any use raises via `requires_backends` (PyTorch needed).
IBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class IBertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class IBertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class IBertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class IBertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class IBertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class IBertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class IBertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# LayoutLM dummies: any use raises via `requires_backends` (PyTorch needed).
LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST = None


class LayoutLMForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LayoutLMForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LayoutLMForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LayoutLMModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LayoutLMPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# LED dummies: any use raises via `requires_backends` (PyTorch needed).
LED_PRETRAINED_MODEL_ARCHIVE_LIST = None


class LEDForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LEDForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LEDForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LEDModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LEDPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# Longformer dummies: any use raises via `requires_backends` (PyTorch needed).
LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None


class LongformerForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LongformerForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LongformerForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LongformerForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LongformerForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LongformerModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LongformerPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LongformerSelfAttention:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])
# LUKE dummies: any use raises via `requires_backends` (PyTorch needed).
LUKE_PRETRAINED_MODEL_ARCHIVE_LIST = None


class LukeForEntityClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LukeForEntityPairClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LukeForEntitySpanClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LukeModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LukePreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# LXMERT dummies: any use raises via `requires_backends` (PyTorch needed).
class LxmertEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LxmertForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LxmertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LxmertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LxmertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class LxmertVisualFeatureEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class LxmertXLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])
# M2M100 dummies: any use raises via `requires_backends` (PyTorch needed).
M2M_100_PRETRAINED_MODEL_ARCHIVE_LIST = None


class M2M100ForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class M2M100Model:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class M2M100PreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# Marian dummies: any use raises via `requires_backends` (PyTorch needed).
class MarianForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MarianModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MarianMTModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# MBart dummies: any use raises via `requires_backends` (PyTorch needed).
class MBartForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MBartForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MBartForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MBartForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MBartModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MBartPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# MegatronBert dummies: any use raises via `requires_backends` (PyTorch needed).
MEGATRON_BERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class MegatronBertForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MegatronBertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MegatronBertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MegatronBertForNextSentencePrediction:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MegatronBertForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MegatronBertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MegatronBertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MegatronBertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MegatronBertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MegatronBertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# MMBT dummies: any use raises via `requires_backends` (PyTorch needed).
class MMBTForClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MMBTModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ModalEmbeddings:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])
# MobileBert dummies: any use raises via `requires_backends` (PyTorch needed).
MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class MobileBertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MobileBertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MobileBertForNextSentencePrediction:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MobileBertForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MobileBertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MobileBertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MobileBertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MobileBertLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MobileBertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MobileBertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_mobilebert(*args, **kwargs):
    requires_backends(load_tf_weights_in_mobilebert, ["torch"])
# MPNet dummies: any use raises via `requires_backends` (PyTorch needed).
MPNET_PRETRAINED_MODEL_ARCHIVE_LIST = None


class MPNetForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MPNetForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MPNetForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MPNetForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MPNetForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MPNetLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class MPNetModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MPNetPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# MT5 dummies: any use raises via `requires_backends` (PyTorch needed).
class MT5EncoderModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MT5ForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class MT5Model:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# OpenAI GPT dummies: any use raises via `requires_backends` (PyTorch needed).
OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class OpenAIGPTDoubleHeadsModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class OpenAIGPTForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class OpenAIGPTLMHeadModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class OpenAIGPTModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class OpenAIGPTPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_openai_gpt(*args, **kwargs):
    requires_backends(load_tf_weights_in_openai_gpt, ["torch"])
# Pegasus dummies: any use raises via `requires_backends` (PyTorch needed).
class PegasusForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class PegasusForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class PegasusModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class PegasusPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# ProphetNet dummies: any use raises via `requires_backends` (PyTorch needed).
PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST = None


class ProphetNetDecoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class ProphetNetEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class ProphetNetForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ProphetNetForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ProphetNetModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ProphetNetPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# RAG dummies: any use raises via `requires_backends` (PyTorch needed).
class RagModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RagPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RagSequenceForGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class RagTokenForGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])
# Reformer dummies: any use raises via `requires_backends` (PyTorch needed).
REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None


class ReformerAttention:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class ReformerForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ReformerForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ReformerForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ReformerLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class ReformerModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ReformerModelWithLMHead:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ReformerPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# RetriBert dummies: any use raises via `requires_backends` (PyTorch needed).
RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


class RetriBertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RetriBertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# RoBERTa dummies: any use raises via `requires_backends` (PyTorch needed).
ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None


class RobertaForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RobertaForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RobertaForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RobertaForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RobertaForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RobertaForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RobertaModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RobertaPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
# RoFormer dummies: any use raises via `requires_backends` (PyTorch needed).
ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None


class RoFormerForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RoFormerForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RoFormerForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RoFormerForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RoFormerForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RoFormerForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RoFormerLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class RoFormerModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class RoFormerPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_roformer(*args, **kwargs):
    requires_backends(load_tf_weights_in_roformer, ["torch"])
SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy Speech2Text objects: importable without torch; all entry points hand
# off to ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class Speech2TextForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class Speech2TextModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class Speech2TextPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy SqueezeBERT objects: importable without torch; all entry points hand
# off to ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class SqueezeBertForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class SqueezeBertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class SqueezeBertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class SqueezeBertForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class SqueezeBertForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class SqueezeBertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class SqueezeBertModule:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class SqueezeBertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
T5_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy T5 objects: importable without torch; all entry points hand off to
# ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class T5EncoderModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class T5ForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class T5Model:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class T5PreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_t5(*args, **kwargs):
    requires_backends(load_tf_weights_in_t5, ["torch"])
TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy TAPAS objects: importable without torch; all entry points hand off to
# ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class TapasForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class TapasForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class TapasForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class TapasModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class TapasPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy Transformer-XL objects: importable without torch; all entry points hand
# off to ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class AdaptiveEmbedding:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class TransfoXLForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class TransfoXLLMHeadModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class TransfoXLModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class TransfoXLPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_transfo_xl(*args, **kwargs):
    requires_backends(load_tf_weights_in_transfo_xl, ["torch"])
VISUAL_BERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy VisualBERT objects: importable without torch; all entry points hand
# off to ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class VisualBertForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class VisualBertForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class VisualBertForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class VisualBertForRegionToPhraseAlignment:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class VisualBertForVisualReasoning:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class VisualBertLayer:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class VisualBertModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class VisualBertPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
VIT_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy ViT objects: importable without torch; all entry points hand off to
# ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class ViTForImageClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class ViTModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class ViTPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
WAV_2_VEC_2_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy Wav2Vec2 objects: importable without torch; all entry points hand off
# to ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class Wav2Vec2ForCTC:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class Wav2Vec2ForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class Wav2Vec2ForPreTraining:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class Wav2Vec2Model:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class Wav2Vec2PreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
XLM_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy XLM objects: importable without torch; all entry points hand off to
# ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class XLMForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMForQuestionAnsweringSimple:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMWithLMHeadModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
XLM_PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy XLM-ProphetNet objects: importable without torch; all entry points
# hand off to ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class XLMProphetNetDecoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class XLMProphetNetEncoder:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class XLMProphetNetForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMProphetNetForConditionalGeneration:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMProphetNetModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy XLM-RoBERTa objects: importable without torch; all entry points hand
# off to ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class XLMRobertaForCausalLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMRobertaForMaskedLM:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMRobertaForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMRobertaForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMRobertaForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMRobertaForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLMRobertaModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])
XLNET_PRETRAINED_MODEL_ARCHIVE_LIST = None


# Dummy XLNet objects: importable without torch; all entry points hand off to
# ``requires_backends`` to signal the missing "torch" backend.
# ``from_pretrained`` is a classmethod so ``cls`` receives the class itself.
class XLNetForMultipleChoice:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLNetForQuestionAnswering:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLNetForQuestionAnsweringSimple:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLNetForSequenceClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLNetForTokenClassification:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLNetLMHeadModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLNetModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


class XLNetPreTrainedModel:
    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, ["torch"])


def load_tf_weights_in_xlnet(*args, **kwargs):
    requires_backends(load_tf_weights_in_xlnet, ["torch"])
# Dummy optimizer and learning-rate-schedule objects.  They exist only so the
# names can be imported without torch installed; touching any of them hands
# off to ``requires_backends``, which signals the missing "torch" backend.
class Adafactor:
    """Placeholder for the torch-backed Adafactor optimizer."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


class AdamW:
    """Placeholder for the torch-backed AdamW optimizer."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


def get_constant_schedule(*args, **kwargs):
    """Placeholder schedule factory; requires the "torch" backend."""
    requires_backends(get_constant_schedule, ["torch"])


def get_constant_schedule_with_warmup(*args, **kwargs):
    """Placeholder schedule factory; requires the "torch" backend."""
    requires_backends(get_constant_schedule_with_warmup, ["torch"])


def get_cosine_schedule_with_warmup(*args, **kwargs):
    """Placeholder schedule factory; requires the "torch" backend."""
    requires_backends(get_cosine_schedule_with_warmup, ["torch"])


def get_cosine_with_hard_restarts_schedule_with_warmup(*args, **kwargs):
    """Placeholder schedule factory; requires the "torch" backend."""
    requires_backends(get_cosine_with_hard_restarts_schedule_with_warmup, ["torch"])


def get_linear_schedule_with_warmup(*args, **kwargs):
    """Placeholder schedule factory; requires the "torch" backend."""
    requires_backends(get_linear_schedule_with_warmup, ["torch"])


def get_polynomial_decay_schedule_with_warmup(*args, **kwargs):
    """Placeholder schedule factory; requires the "torch" backend."""
    requires_backends(get_polynomial_decay_schedule_with_warmup, ["torch"])


def get_scheduler(*args, **kwargs):
    """Placeholder schedule factory; requires the "torch" backend."""
    requires_backends(get_scheduler, ["torch"])
class Trainer:
    """Placeholder for the torch-backed Trainer; importable without torch,
    but instantiation hands off to ``requires_backends``."""

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])
def torch_distributed_zero_first(*args, **kwargs):
    """Placeholder for the torch-backed helper; calling it hands off to
    ``requires_backends``, which signals the missing "torch" backend."""
    requires_backends(torch_distributed_zero_first, ["torch"])
class Seq2SeqTrainer: | |
def __init__(self, *args, **kwargs): | |
requires_backends(self, ["torch"]) | |