# This file is autogenerated by the command `make fix-copies`, do not edit.
"""Placeholder ("dummy") tokenizer classes used when the `tokenizers` backend is absent.

Each class below mirrors the public name of a real fast tokenizer. `DummyObject`
(a metaclass) and `requires_backends` come from the library's utils: any attempt
to instantiate one of these placeholders raises an informative ImportError telling
the user to install the missing `tokenizers` package, instead of failing with an
opaque AttributeError at import time.
"""
from ..utils import DummyObject, requires_backends


class AlbertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class BartTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class BarthezTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class BertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class BigBirdTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class BlenderbotTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class BlenderbotSmallTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class BloomTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class CamembertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class CLIPTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class CodeLlamaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class CodeGenTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class CohereTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class ConvBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class CpmTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class DebertaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class DebertaV2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class RealmTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class RetriBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class DistilBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class DPRContextEncoderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class DPRQuestionEncoderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class DPRReaderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class ElectraTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class FNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class FunnelTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class GemmaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class GPT2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class GPTNeoXTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


# NOTE: intentionally not "*Fast" — the real GPTNeoXJapaneseTokenizer itself
# lives behind the `tokenizers` backend.
class GPTNeoXJapaneseTokenizer(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class HerbertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class LayoutLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class LayoutLMv2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class LayoutLMv3TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class LayoutXLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class LEDTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class LlamaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class LongformerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class LxmertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class MarkupLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class MBartTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class MBart50TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class MobileBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class MPNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class MT5TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class MvpTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class NllbTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class NougatTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class OpenAIGPTTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class PegasusTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class Qwen2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class ReformerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class RemBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class RobertaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class RoFormerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class SeamlessM4TTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class SplinterTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class SqueezeBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class T5TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class UdopTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class WhisperTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class XGLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class XLMRobertaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class XLNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


class PreTrainedTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])