# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends
class AlbertTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``AlbertTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class BarthezTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``BarthezTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class BartphoTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``BartphoTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class BertGenerationTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``BertGenerationTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class BigBirdTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``BigBirdTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class CamembertTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``CamembertTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class CodeLlamaTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``CodeLlamaTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class CpmTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``CpmTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class DebertaV2Tokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``DebertaV2Tokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class ErnieMTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``ErnieMTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class XLMProphetNetTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``XLMProphetNetTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class FNetTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``FNetTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class GemmaTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``GemmaTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class GPTSw3Tokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``GPTSw3Tokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class LayoutXLMTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``LayoutXLMTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class LlamaTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``LlamaTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class M2M100Tokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``M2M100Tokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class MarianTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``MarianTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class MBartTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``MBartTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class MBart50Tokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``MBart50Tokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class MLukeTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``MLukeTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class MT5Tokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``MT5Tokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class NllbTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``NllbTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class PegasusTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``PegasusTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class PLBartTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``PLBartTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class ReformerTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``ReformerTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class RemBertTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``RemBertTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class SeamlessM4TTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``SeamlessM4TTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class SiglipTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``SiglipTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class Speech2TextTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``Speech2TextTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class SpeechT5Tokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``SpeechT5Tokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class T5Tokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``T5Tokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class UdopTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``UdopTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class XGLMTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``XGLMTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class XLMRobertaTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``XLMRobertaTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class XLNetTokenizer(metaclass=DummyObject):
    """Dummy stand-in for ``XLNetTokenizer``; ``__init__`` only calls ``requires_backends(self, ["sentencepiece"])`` (expected to raise when the sentencepiece backend is unavailable — see ``..utils``)."""

    # Backend(s) the real implementation requires.
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])