# dummy_tokenizers_objects.py
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends
  3. class AlbertTokenizerFast(metaclass=DummyObject):
  4. _backends = ["tokenizers"]
  5. def __init__(self, *args, **kwargs):
  6. requires_backends(self, ["tokenizers"])
  7. class BartTokenizerFast(metaclass=DummyObject):
  8. _backends = ["tokenizers"]
  9. def __init__(self, *args, **kwargs):
  10. requires_backends(self, ["tokenizers"])
  11. class BarthezTokenizerFast(metaclass=DummyObject):
  12. _backends = ["tokenizers"]
  13. def __init__(self, *args, **kwargs):
  14. requires_backends(self, ["tokenizers"])
  15. class BertTokenizerFast(metaclass=DummyObject):
  16. _backends = ["tokenizers"]
  17. def __init__(self, *args, **kwargs):
  18. requires_backends(self, ["tokenizers"])
  19. class BigBirdTokenizerFast(metaclass=DummyObject):
  20. _backends = ["tokenizers"]
  21. def __init__(self, *args, **kwargs):
  22. requires_backends(self, ["tokenizers"])
  23. class BlenderbotTokenizerFast(metaclass=DummyObject):
  24. _backends = ["tokenizers"]
  25. def __init__(self, *args, **kwargs):
  26. requires_backends(self, ["tokenizers"])
  27. class BlenderbotSmallTokenizerFast(metaclass=DummyObject):
  28. _backends = ["tokenizers"]
  29. def __init__(self, *args, **kwargs):
  30. requires_backends(self, ["tokenizers"])
  31. class BloomTokenizerFast(metaclass=DummyObject):
  32. _backends = ["tokenizers"]
  33. def __init__(self, *args, **kwargs):
  34. requires_backends(self, ["tokenizers"])
  35. class CamembertTokenizerFast(metaclass=DummyObject):
  36. _backends = ["tokenizers"]
  37. def __init__(self, *args, **kwargs):
  38. requires_backends(self, ["tokenizers"])
  39. class CLIPTokenizerFast(metaclass=DummyObject):
  40. _backends = ["tokenizers"]
  41. def __init__(self, *args, **kwargs):
  42. requires_backends(self, ["tokenizers"])
  43. class CodeLlamaTokenizerFast(metaclass=DummyObject):
  44. _backends = ["tokenizers"]
  45. def __init__(self, *args, **kwargs):
  46. requires_backends(self, ["tokenizers"])
  47. class CodeGenTokenizerFast(metaclass=DummyObject):
  48. _backends = ["tokenizers"]
  49. def __init__(self, *args, **kwargs):
  50. requires_backends(self, ["tokenizers"])
  51. class CohereTokenizerFast(metaclass=DummyObject):
  52. _backends = ["tokenizers"]
  53. def __init__(self, *args, **kwargs):
  54. requires_backends(self, ["tokenizers"])
  55. class ConvBertTokenizerFast(metaclass=DummyObject):
  56. _backends = ["tokenizers"]
  57. def __init__(self, *args, **kwargs):
  58. requires_backends(self, ["tokenizers"])
  59. class CpmTokenizerFast(metaclass=DummyObject):
  60. _backends = ["tokenizers"]
  61. def __init__(self, *args, **kwargs):
  62. requires_backends(self, ["tokenizers"])
  63. class DebertaTokenizerFast(metaclass=DummyObject):
  64. _backends = ["tokenizers"]
  65. def __init__(self, *args, **kwargs):
  66. requires_backends(self, ["tokenizers"])
  67. class DebertaV2TokenizerFast(metaclass=DummyObject):
  68. _backends = ["tokenizers"]
  69. def __init__(self, *args, **kwargs):
  70. requires_backends(self, ["tokenizers"])
  71. class RealmTokenizerFast(metaclass=DummyObject):
  72. _backends = ["tokenizers"]
  73. def __init__(self, *args, **kwargs):
  74. requires_backends(self, ["tokenizers"])
  75. class RetriBertTokenizerFast(metaclass=DummyObject):
  76. _backends = ["tokenizers"]
  77. def __init__(self, *args, **kwargs):
  78. requires_backends(self, ["tokenizers"])
  79. class DistilBertTokenizerFast(metaclass=DummyObject):
  80. _backends = ["tokenizers"]
  81. def __init__(self, *args, **kwargs):
  82. requires_backends(self, ["tokenizers"])
  83. class DPRContextEncoderTokenizerFast(metaclass=DummyObject):
  84. _backends = ["tokenizers"]
  85. def __init__(self, *args, **kwargs):
  86. requires_backends(self, ["tokenizers"])
  87. class DPRQuestionEncoderTokenizerFast(metaclass=DummyObject):
  88. _backends = ["tokenizers"]
  89. def __init__(self, *args, **kwargs):
  90. requires_backends(self, ["tokenizers"])
  91. class DPRReaderTokenizerFast(metaclass=DummyObject):
  92. _backends = ["tokenizers"]
  93. def __init__(self, *args, **kwargs):
  94. requires_backends(self, ["tokenizers"])
  95. class ElectraTokenizerFast(metaclass=DummyObject):
  96. _backends = ["tokenizers"]
  97. def __init__(self, *args, **kwargs):
  98. requires_backends(self, ["tokenizers"])
  99. class FNetTokenizerFast(metaclass=DummyObject):
  100. _backends = ["tokenizers"]
  101. def __init__(self, *args, **kwargs):
  102. requires_backends(self, ["tokenizers"])
  103. class FunnelTokenizerFast(metaclass=DummyObject):
  104. _backends = ["tokenizers"]
  105. def __init__(self, *args, **kwargs):
  106. requires_backends(self, ["tokenizers"])
  107. class GemmaTokenizerFast(metaclass=DummyObject):
  108. _backends = ["tokenizers"]
  109. def __init__(self, *args, **kwargs):
  110. requires_backends(self, ["tokenizers"])
  111. class GPT2TokenizerFast(metaclass=DummyObject):
  112. _backends = ["tokenizers"]
  113. def __init__(self, *args, **kwargs):
  114. requires_backends(self, ["tokenizers"])
  115. class GPTNeoXTokenizerFast(metaclass=DummyObject):
  116. _backends = ["tokenizers"]
  117. def __init__(self, *args, **kwargs):
  118. requires_backends(self, ["tokenizers"])
  119. class GPTNeoXJapaneseTokenizer(metaclass=DummyObject):
  120. _backends = ["tokenizers"]
  121. def __init__(self, *args, **kwargs):
  122. requires_backends(self, ["tokenizers"])
  123. class HerbertTokenizerFast(metaclass=DummyObject):
  124. _backends = ["tokenizers"]
  125. def __init__(self, *args, **kwargs):
  126. requires_backends(self, ["tokenizers"])
  127. class LayoutLMTokenizerFast(metaclass=DummyObject):
  128. _backends = ["tokenizers"]
  129. def __init__(self, *args, **kwargs):
  130. requires_backends(self, ["tokenizers"])
  131. class LayoutLMv2TokenizerFast(metaclass=DummyObject):
  132. _backends = ["tokenizers"]
  133. def __init__(self, *args, **kwargs):
  134. requires_backends(self, ["tokenizers"])
  135. class LayoutLMv3TokenizerFast(metaclass=DummyObject):
  136. _backends = ["tokenizers"]
  137. def __init__(self, *args, **kwargs):
  138. requires_backends(self, ["tokenizers"])
  139. class LayoutXLMTokenizerFast(metaclass=DummyObject):
  140. _backends = ["tokenizers"]
  141. def __init__(self, *args, **kwargs):
  142. requires_backends(self, ["tokenizers"])
  143. class LEDTokenizerFast(metaclass=DummyObject):
  144. _backends = ["tokenizers"]
  145. def __init__(self, *args, **kwargs):
  146. requires_backends(self, ["tokenizers"])
  147. class LlamaTokenizerFast(metaclass=DummyObject):
  148. _backends = ["tokenizers"]
  149. def __init__(self, *args, **kwargs):
  150. requires_backends(self, ["tokenizers"])
  151. class LongformerTokenizerFast(metaclass=DummyObject):
  152. _backends = ["tokenizers"]
  153. def __init__(self, *args, **kwargs):
  154. requires_backends(self, ["tokenizers"])
  155. class LxmertTokenizerFast(metaclass=DummyObject):
  156. _backends = ["tokenizers"]
  157. def __init__(self, *args, **kwargs):
  158. requires_backends(self, ["tokenizers"])
  159. class MarkupLMTokenizerFast(metaclass=DummyObject):
  160. _backends = ["tokenizers"]
  161. def __init__(self, *args, **kwargs):
  162. requires_backends(self, ["tokenizers"])
  163. class MBartTokenizerFast(metaclass=DummyObject):
  164. _backends = ["tokenizers"]
  165. def __init__(self, *args, **kwargs):
  166. requires_backends(self, ["tokenizers"])
  167. class MBart50TokenizerFast(metaclass=DummyObject):
  168. _backends = ["tokenizers"]
  169. def __init__(self, *args, **kwargs):
  170. requires_backends(self, ["tokenizers"])
  171. class MobileBertTokenizerFast(metaclass=DummyObject):
  172. _backends = ["tokenizers"]
  173. def __init__(self, *args, **kwargs):
  174. requires_backends(self, ["tokenizers"])
  175. class MPNetTokenizerFast(metaclass=DummyObject):
  176. _backends = ["tokenizers"]
  177. def __init__(self, *args, **kwargs):
  178. requires_backends(self, ["tokenizers"])
  179. class MT5TokenizerFast(metaclass=DummyObject):
  180. _backends = ["tokenizers"]
  181. def __init__(self, *args, **kwargs):
  182. requires_backends(self, ["tokenizers"])
  183. class MvpTokenizerFast(metaclass=DummyObject):
  184. _backends = ["tokenizers"]
  185. def __init__(self, *args, **kwargs):
  186. requires_backends(self, ["tokenizers"])
  187. class NllbTokenizerFast(metaclass=DummyObject):
  188. _backends = ["tokenizers"]
  189. def __init__(self, *args, **kwargs):
  190. requires_backends(self, ["tokenizers"])
  191. class NougatTokenizerFast(metaclass=DummyObject):
  192. _backends = ["tokenizers"]
  193. def __init__(self, *args, **kwargs):
  194. requires_backends(self, ["tokenizers"])
  195. class OpenAIGPTTokenizerFast(metaclass=DummyObject):
  196. _backends = ["tokenizers"]
  197. def __init__(self, *args, **kwargs):
  198. requires_backends(self, ["tokenizers"])
  199. class PegasusTokenizerFast(metaclass=DummyObject):
  200. _backends = ["tokenizers"]
  201. def __init__(self, *args, **kwargs):
  202. requires_backends(self, ["tokenizers"])
  203. class Qwen2TokenizerFast(metaclass=DummyObject):
  204. _backends = ["tokenizers"]
  205. def __init__(self, *args, **kwargs):
  206. requires_backends(self, ["tokenizers"])
  207. class ReformerTokenizerFast(metaclass=DummyObject):
  208. _backends = ["tokenizers"]
  209. def __init__(self, *args, **kwargs):
  210. requires_backends(self, ["tokenizers"])
  211. class RemBertTokenizerFast(metaclass=DummyObject):
  212. _backends = ["tokenizers"]
  213. def __init__(self, *args, **kwargs):
  214. requires_backends(self, ["tokenizers"])
  215. class RobertaTokenizerFast(metaclass=DummyObject):
  216. _backends = ["tokenizers"]
  217. def __init__(self, *args, **kwargs):
  218. requires_backends(self, ["tokenizers"])
  219. class RoFormerTokenizerFast(metaclass=DummyObject):
  220. _backends = ["tokenizers"]
  221. def __init__(self, *args, **kwargs):
  222. requires_backends(self, ["tokenizers"])
  223. class SeamlessM4TTokenizerFast(metaclass=DummyObject):
  224. _backends = ["tokenizers"]
  225. def __init__(self, *args, **kwargs):
  226. requires_backends(self, ["tokenizers"])
  227. class SplinterTokenizerFast(metaclass=DummyObject):
  228. _backends = ["tokenizers"]
  229. def __init__(self, *args, **kwargs):
  230. requires_backends(self, ["tokenizers"])
  231. class SqueezeBertTokenizerFast(metaclass=DummyObject):
  232. _backends = ["tokenizers"]
  233. def __init__(self, *args, **kwargs):
  234. requires_backends(self, ["tokenizers"])
  235. class T5TokenizerFast(metaclass=DummyObject):
  236. _backends = ["tokenizers"]
  237. def __init__(self, *args, **kwargs):
  238. requires_backends(self, ["tokenizers"])
  239. class UdopTokenizerFast(metaclass=DummyObject):
  240. _backends = ["tokenizers"]
  241. def __init__(self, *args, **kwargs):
  242. requires_backends(self, ["tokenizers"])
  243. class WhisperTokenizerFast(metaclass=DummyObject):
  244. _backends = ["tokenizers"]
  245. def __init__(self, *args, **kwargs):
  246. requires_backends(self, ["tokenizers"])
  247. class XGLMTokenizerFast(metaclass=DummyObject):
  248. _backends = ["tokenizers"]
  249. def __init__(self, *args, **kwargs):
  250. requires_backends(self, ["tokenizers"])
  251. class XLMRobertaTokenizerFast(metaclass=DummyObject):
  252. _backends = ["tokenizers"]
  253. def __init__(self, *args, **kwargs):
  254. requires_backends(self, ["tokenizers"])
  255. class XLNetTokenizerFast(metaclass=DummyObject):
  256. _backends = ["tokenizers"]
  257. def __init__(self, *args, **kwargs):
  258. requires_backends(self, ["tokenizers"])
  259. class PreTrainedTokenizerFast(metaclass=DummyObject):
  260. _backends = ["tokenizers"]
  261. def __init__(self, *args, **kwargs):
  262. requires_backends(self, ["tokenizers"])