# This file is autogenerated by the command `make fix-copies`, do not edit.
  2. from ..utils import DummyObject, requires_backends
  3. class AlbertTokenizer(metaclass=DummyObject):
  4. _backends = ["sentencepiece"]
  5. def __init__(self, *args, **kwargs):
  6. requires_backends(self, ["sentencepiece"])
  7. class BarthezTokenizer(metaclass=DummyObject):
  8. _backends = ["sentencepiece"]
  9. def __init__(self, *args, **kwargs):
  10. requires_backends(self, ["sentencepiece"])
  11. class BartphoTokenizer(metaclass=DummyObject):
  12. _backends = ["sentencepiece"]
  13. def __init__(self, *args, **kwargs):
  14. requires_backends(self, ["sentencepiece"])
  15. class BertGenerationTokenizer(metaclass=DummyObject):
  16. _backends = ["sentencepiece"]
  17. def __init__(self, *args, **kwargs):
  18. requires_backends(self, ["sentencepiece"])
  19. class BigBirdTokenizer(metaclass=DummyObject):
  20. _backends = ["sentencepiece"]
  21. def __init__(self, *args, **kwargs):
  22. requires_backends(self, ["sentencepiece"])
  23. class CamembertTokenizer(metaclass=DummyObject):
  24. _backends = ["sentencepiece"]
  25. def __init__(self, *args, **kwargs):
  26. requires_backends(self, ["sentencepiece"])
  27. class CodeLlamaTokenizer(metaclass=DummyObject):
  28. _backends = ["sentencepiece"]
  29. def __init__(self, *args, **kwargs):
  30. requires_backends(self, ["sentencepiece"])
  31. class CpmTokenizer(metaclass=DummyObject):
  32. _backends = ["sentencepiece"]
  33. def __init__(self, *args, **kwargs):
  34. requires_backends(self, ["sentencepiece"])
  35. class DebertaV2Tokenizer(metaclass=DummyObject):
  36. _backends = ["sentencepiece"]
  37. def __init__(self, *args, **kwargs):
  38. requires_backends(self, ["sentencepiece"])
  39. class ErnieMTokenizer(metaclass=DummyObject):
  40. _backends = ["sentencepiece"]
  41. def __init__(self, *args, **kwargs):
  42. requires_backends(self, ["sentencepiece"])
  43. class XLMProphetNetTokenizer(metaclass=DummyObject):
  44. _backends = ["sentencepiece"]
  45. def __init__(self, *args, **kwargs):
  46. requires_backends(self, ["sentencepiece"])
  47. class FNetTokenizer(metaclass=DummyObject):
  48. _backends = ["sentencepiece"]
  49. def __init__(self, *args, **kwargs):
  50. requires_backends(self, ["sentencepiece"])
  51. class GemmaTokenizer(metaclass=DummyObject):
  52. _backends = ["sentencepiece"]
  53. def __init__(self, *args, **kwargs):
  54. requires_backends(self, ["sentencepiece"])
  55. class GPTSw3Tokenizer(metaclass=DummyObject):
  56. _backends = ["sentencepiece"]
  57. def __init__(self, *args, **kwargs):
  58. requires_backends(self, ["sentencepiece"])
  59. class LayoutXLMTokenizer(metaclass=DummyObject):
  60. _backends = ["sentencepiece"]
  61. def __init__(self, *args, **kwargs):
  62. requires_backends(self, ["sentencepiece"])
  63. class LlamaTokenizer(metaclass=DummyObject):
  64. _backends = ["sentencepiece"]
  65. def __init__(self, *args, **kwargs):
  66. requires_backends(self, ["sentencepiece"])
  67. class M2M100Tokenizer(metaclass=DummyObject):
  68. _backends = ["sentencepiece"]
  69. def __init__(self, *args, **kwargs):
  70. requires_backends(self, ["sentencepiece"])
  71. class MarianTokenizer(metaclass=DummyObject):
  72. _backends = ["sentencepiece"]
  73. def __init__(self, *args, **kwargs):
  74. requires_backends(self, ["sentencepiece"])
  75. class MBartTokenizer(metaclass=DummyObject):
  76. _backends = ["sentencepiece"]
  77. def __init__(self, *args, **kwargs):
  78. requires_backends(self, ["sentencepiece"])
  79. class MBart50Tokenizer(metaclass=DummyObject):
  80. _backends = ["sentencepiece"]
  81. def __init__(self, *args, **kwargs):
  82. requires_backends(self, ["sentencepiece"])
  83. class MLukeTokenizer(metaclass=DummyObject):
  84. _backends = ["sentencepiece"]
  85. def __init__(self, *args, **kwargs):
  86. requires_backends(self, ["sentencepiece"])
  87. class MT5Tokenizer(metaclass=DummyObject):
  88. _backends = ["sentencepiece"]
  89. def __init__(self, *args, **kwargs):
  90. requires_backends(self, ["sentencepiece"])
  91. class NllbTokenizer(metaclass=DummyObject):
  92. _backends = ["sentencepiece"]
  93. def __init__(self, *args, **kwargs):
  94. requires_backends(self, ["sentencepiece"])
  95. class PegasusTokenizer(metaclass=DummyObject):
  96. _backends = ["sentencepiece"]
  97. def __init__(self, *args, **kwargs):
  98. requires_backends(self, ["sentencepiece"])
  99. class PLBartTokenizer(metaclass=DummyObject):
  100. _backends = ["sentencepiece"]
  101. def __init__(self, *args, **kwargs):
  102. requires_backends(self, ["sentencepiece"])
  103. class ReformerTokenizer(metaclass=DummyObject):
  104. _backends = ["sentencepiece"]
  105. def __init__(self, *args, **kwargs):
  106. requires_backends(self, ["sentencepiece"])
  107. class RemBertTokenizer(metaclass=DummyObject):
  108. _backends = ["sentencepiece"]
  109. def __init__(self, *args, **kwargs):
  110. requires_backends(self, ["sentencepiece"])
  111. class SeamlessM4TTokenizer(metaclass=DummyObject):
  112. _backends = ["sentencepiece"]
  113. def __init__(self, *args, **kwargs):
  114. requires_backends(self, ["sentencepiece"])
  115. class SiglipTokenizer(metaclass=DummyObject):
  116. _backends = ["sentencepiece"]
  117. def __init__(self, *args, **kwargs):
  118. requires_backends(self, ["sentencepiece"])
  119. class Speech2TextTokenizer(metaclass=DummyObject):
  120. _backends = ["sentencepiece"]
  121. def __init__(self, *args, **kwargs):
  122. requires_backends(self, ["sentencepiece"])
  123. class SpeechT5Tokenizer(metaclass=DummyObject):
  124. _backends = ["sentencepiece"]
  125. def __init__(self, *args, **kwargs):
  126. requires_backends(self, ["sentencepiece"])
  127. class T5Tokenizer(metaclass=DummyObject):
  128. _backends = ["sentencepiece"]
  129. def __init__(self, *args, **kwargs):
  130. requires_backends(self, ["sentencepiece"])
  131. class UdopTokenizer(metaclass=DummyObject):
  132. _backends = ["sentencepiece"]
  133. def __init__(self, *args, **kwargs):
  134. requires_backends(self, ["sentencepiece"])
  135. class XGLMTokenizer(metaclass=DummyObject):
  136. _backends = ["sentencepiece"]
  137. def __init__(self, *args, **kwargs):
  138. requires_backends(self, ["sentencepiece"])
  139. class XLMRobertaTokenizer(metaclass=DummyObject):
  140. _backends = ["sentencepiece"]
  141. def __init__(self, *args, **kwargs):
  142. requires_backends(self, ["sentencepiece"])
  143. class XLNetTokenizer(metaclass=DummyObject):
  144. _backends = ["sentencepiece"]
  145. def __init__(self, *args, **kwargs):
  146. requires_backends(self, ["sentencepiece"])