processing_auto.py

# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AutoProcessor class."""

import importlib
import inspect
import json
import os
import warnings
from collections import OrderedDict

# Build the list of all feature extractors
from ...configuration_utils import PretrainedConfig
from ...dynamic_module_utils import get_class_from_dynamic_module, resolve_trust_remote_code
from ...feature_extraction_utils import FeatureExtractionMixin
from ...image_processing_utils import ImageProcessingMixin
from ...processing_utils import ProcessorMixin
from ...tokenization_utils import TOKENIZER_CONFIG_FILE
from ...utils import FEATURE_EXTRACTOR_NAME, PROCESSOR_NAME, get_file_from_repo, logging
from .auto_factory import _LazyAutoMapping
from .configuration_auto import (
    CONFIG_MAPPING_NAMES,
    AutoConfig,
    model_type_to_module_name,
    replace_list_option_in_docstrings,
)
from .feature_extraction_auto import AutoFeatureExtractor
from .image_processing_auto import AutoImageProcessor
from .tokenization_auto import AutoTokenizer


logger = logging.get_logger(__name__)

PROCESSOR_MAPPING_NAMES = OrderedDict(
    [
        ("align", "AlignProcessor"),
        ("altclip", "AltCLIPProcessor"),
        ("bark", "BarkProcessor"),
        ("blip", "BlipProcessor"),
        ("blip-2", "Blip2Processor"),
        ("bridgetower", "BridgeTowerProcessor"),
        ("chameleon", "ChameleonProcessor"),
        ("chinese_clip", "ChineseCLIPProcessor"),
        ("clap", "ClapProcessor"),
        ("clip", "CLIPProcessor"),
        ("clipseg", "CLIPSegProcessor"),
        ("clvp", "ClvpProcessor"),
        ("flava", "FlavaProcessor"),
        ("fuyu", "FuyuProcessor"),
        ("git", "GitProcessor"),
        ("grounding-dino", "GroundingDinoProcessor"),
        ("groupvit", "CLIPProcessor"),
        ("hubert", "Wav2Vec2Processor"),
        ("idefics", "IdeficsProcessor"),
        ("idefics2", "Idefics2Processor"),
        ("idefics3", "Idefics3Processor"),
        ("instructblip", "InstructBlipProcessor"),
        ("instructblipvideo", "InstructBlipVideoProcessor"),
        ("kosmos-2", "Kosmos2Processor"),
        ("layoutlmv2", "LayoutLMv2Processor"),
        ("layoutlmv3", "LayoutLMv3Processor"),
        ("llava", "LlavaProcessor"),
        ("llava_next", "LlavaNextProcessor"),
        ("llava_next_video", "LlavaNextVideoProcessor"),
        ("llava_onevision", "LlavaOnevisionProcessor"),
        ("markuplm", "MarkupLMProcessor"),
        ("mctct", "MCTCTProcessor"),
        ("mgp-str", "MgpstrProcessor"),
        ("mllama", "MllamaProcessor"),
        ("oneformer", "OneFormerProcessor"),
        ("owlv2", "Owlv2Processor"),
        ("owlvit", "OwlViTProcessor"),
        ("paligemma", "PaliGemmaProcessor"),
        ("pix2struct", "Pix2StructProcessor"),
        ("pixtral", "PixtralProcessor"),
        ("pop2piano", "Pop2PianoProcessor"),
        ("qwen2_audio", "Qwen2AudioProcessor"),
        ("qwen2_vl", "Qwen2VLProcessor"),
        ("sam", "SamProcessor"),
        ("seamless_m4t", "SeamlessM4TProcessor"),
        ("sew", "Wav2Vec2Processor"),
        ("sew-d", "Wav2Vec2Processor"),
        ("siglip", "SiglipProcessor"),
        ("speech_to_text", "Speech2TextProcessor"),
        ("speech_to_text_2", "Speech2Text2Processor"),
        ("speecht5", "SpeechT5Processor"),
        ("trocr", "TrOCRProcessor"),
        ("tvlt", "TvltProcessor"),
        ("tvp", "TvpProcessor"),
        ("udop", "UdopProcessor"),
        ("unispeech", "Wav2Vec2Processor"),
        ("unispeech-sat", "Wav2Vec2Processor"),
        ("video_llava", "VideoLlavaProcessor"),
        ("vilt", "ViltProcessor"),
        ("vipllava", "LlavaProcessor"),
        ("vision-text-dual-encoder", "VisionTextDualEncoderProcessor"),
        ("wav2vec2", "Wav2Vec2Processor"),
        ("wav2vec2-bert", "Wav2Vec2Processor"),
        ("wav2vec2-conformer", "Wav2Vec2Processor"),
        ("wavlm", "Wav2Vec2Processor"),
        ("whisper", "WhisperProcessor"),
        ("xclip", "XCLIPProcessor"),
    ]
)

PROCESSOR_MAPPING = _LazyAutoMapping(CONFIG_MAPPING_NAMES, PROCESSOR_MAPPING_NAMES)
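# Descriptive note: `PROCESSOR_MAPPING` lazily maps configuration classes to their processor classes; the concrete
# classes are only imported when a mapping entry is actually accessed.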


def processor_class_from_name(class_name: str):
    for module_name, processors in PROCESSOR_MAPPING_NAMES.items():
        if class_name in processors:
            module_name = model_type_to_module_name(module_name)

            module = importlib.import_module(f".{module_name}", "transformers.models")
            try:
                return getattr(module, class_name)
            except AttributeError:
                continue

    for processor in PROCESSOR_MAPPING._extra_content.values():
        if getattr(processor, "__name__", None) == class_name:
            return processor

    # We did not find the class, but maybe it's because a dep is missing. In that case, the class will be in the
    # main init and we return the proper dummy to get an appropriate error message.
    main_module = importlib.import_module("transformers")
    if hasattr(main_module, class_name):
        return getattr(main_module, class_name)

    return None
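

# Illustrative sketch (not executed here): with the CLIP dependencies installed,
# `processor_class_from_name("CLIPProcessor")` is expected to return the `CLIPProcessor` class, while a name that is
# neither in `PROCESSOR_MAPPING_NAMES`, nor registered as extra content, nor importable from the top-level
# `transformers` module yields `None`.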


class AutoProcessor:
    r"""
    This is a generic processor class that will be instantiated as one of the processor classes of the library when
    created with the [`AutoProcessor.from_pretrained`] class method.

    This class cannot be instantiated directly using `__init__()` (throws an error).
    """

    def __init__(self):
        raise EnvironmentError(
            "AutoProcessor is designed to be instantiated "
            "using the `AutoProcessor.from_pretrained(pretrained_model_name_or_path)` method."
        )

    @classmethod
    @replace_list_option_in_docstrings(PROCESSOR_MAPPING_NAMES)
    def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
        r"""
        Instantiate one of the processor classes of the library from a pretrained model vocabulary.

        The processor class to instantiate is selected based on the `model_type` property of the config object (either
        passed as an argument or loaded from `pretrained_model_name_or_path` if possible):

        List options

        Params:
            pretrained_model_name_or_path (`str` or `os.PathLike`):
                This can be either:

                - a string, the *model id* of a pretrained processor hosted inside a model repo on huggingface.co.
                - a path to a *directory* containing processor files saved using the `save_pretrained()` method,
                  e.g., `./my_model_directory/`.
            cache_dir (`str` or `os.PathLike`, *optional*):
                Path to a directory in which a downloaded pretrained processor should be cached if the standard cache
                should not be used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force (re-)downloading the processor files and overriding the cached versions if
                they exist.
            resume_download:
                Deprecated and ignored. All downloads are now resumed by default when possible.
                Will be removed in v5 of Transformers.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
            token (`str` or *bool*, *optional*):
                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
                when running `huggingface-cli login` (stored in `~/.huggingface`).
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
                identifier allowed by git.
            return_unused_kwargs (`bool`, *optional*, defaults to `False`):
                If `False`, then this function returns just the final processor object. If `True`, then this function
                returns a `Tuple(processor, unused_kwargs)` where *unused_kwargs* is a dictionary consisting of the
                key/value pairs whose keys are not processor attributes: i.e., the part of `kwargs` which has not been
                used to update `processor` and is otherwise ignored.
            trust_remote_code (`bool`, *optional*, defaults to `False`):
                Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
                should only be set to `True` for repositories you trust and in which you have read the code, as it
                will execute code present on the Hub on your local machine.
            kwargs (`Dict[str, Any]`, *optional*):
                The values in kwargs of any keys which are processor attributes will be used to override the loaded
                values. Behavior concerning key/value pairs whose keys are *not* processor attributes is controlled by
                the `return_unused_kwargs` keyword parameter.

        <Tip>

        Passing `token=True` is required when you want to use a private model.

        </Tip>

        Examples:

        ```python
        >>> from transformers import AutoProcessor

        >>> # Download processor from huggingface.co and cache.
        >>> processor = AutoProcessor.from_pretrained("facebook/wav2vec2-base-960h")

        >>> # If processor files are in a directory (e.g. processor was saved using *save_pretrained('./test/saved_model/')*)
        >>> # processor = AutoProcessor.from_pretrained("./test/saved_model/")
        ```"""
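        # Descriptive note on the lookup order below: a `processor_class` (or an `AutoProcessor` entry in `auto_map`)
        # is searched for, in order, in the processor config (`PROCESSOR_NAME`), the preprocessor config
        # (`FEATURE_EXTRACTOR_NAME`, read first as an image processor and then as a feature extractor config), the
        # tokenizer config, and the model config. A remote (Hub-defined) class is only used when `trust_remote_code`
        # allows it; otherwise the resolved local class, then `PROCESSOR_MAPPING`, and finally `AutoTokenizer`,
        # `AutoImageProcessor` and `AutoFeatureExtractor` are tried.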
        use_auth_token = kwargs.pop("use_auth_token", None)
        if use_auth_token is not None:
            warnings.warn(
                "The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.",
                FutureWarning,
            )
            if kwargs.get("token", None) is not None:
                raise ValueError(
                    "`token` and `use_auth_token` are both specified. Please set only the argument `token`."
                )
            kwargs["token"] = use_auth_token

        config = kwargs.pop("config", None)
        trust_remote_code = kwargs.pop("trust_remote_code", None)
        kwargs["_from_auto"] = True

        processor_class = None
        processor_auto_map = None

        # First, let's see if we have a processor or preprocessor config.
        # Filter the kwargs for `get_file_from_repo`.
        get_file_from_repo_kwargs = {
            key: kwargs[key] for key in inspect.signature(get_file_from_repo).parameters.keys() if key in kwargs
        }

        # Let's start by checking whether the processor class is saved in a processor config
        processor_config_file = get_file_from_repo(
            pretrained_model_name_or_path, PROCESSOR_NAME, **get_file_from_repo_kwargs
        )
        if processor_config_file is not None:
            config_dict, _ = ProcessorMixin.get_processor_dict(pretrained_model_name_or_path, **kwargs)
            processor_class = config_dict.get("processor_class", None)
            if "AutoProcessor" in config_dict.get("auto_map", {}):
                processor_auto_map = config_dict["auto_map"]["AutoProcessor"]

        if processor_class is None:
            # If not found, let's check whether the processor class is saved in an image processor config
            preprocessor_config_file = get_file_from_repo(
                pretrained_model_name_or_path, FEATURE_EXTRACTOR_NAME, **get_file_from_repo_kwargs
            )
            if preprocessor_config_file is not None:
                config_dict, _ = ImageProcessingMixin.get_image_processor_dict(pretrained_model_name_or_path, **kwargs)
                processor_class = config_dict.get("processor_class", None)
                if "AutoProcessor" in config_dict.get("auto_map", {}):
                    processor_auto_map = config_dict["auto_map"]["AutoProcessor"]

            # If not found, let's check whether the processor class is saved in a feature extractor config
            if preprocessor_config_file is not None and processor_class is None:
                config_dict, _ = FeatureExtractionMixin.get_feature_extractor_dict(
                    pretrained_model_name_or_path, **kwargs
                )
                processor_class = config_dict.get("processor_class", None)
                if "AutoProcessor" in config_dict.get("auto_map", {}):
                    processor_auto_map = config_dict["auto_map"]["AutoProcessor"]

        if processor_class is None:
            # Next, let's check whether the processor class is saved in a tokenizer
            tokenizer_config_file = get_file_from_repo(
                pretrained_model_name_or_path, TOKENIZER_CONFIG_FILE, **get_file_from_repo_kwargs
            )
            if tokenizer_config_file is not None:
                with open(tokenizer_config_file, encoding="utf-8") as reader:
                    config_dict = json.load(reader)

                processor_class = config_dict.get("processor_class", None)
                if "AutoProcessor" in config_dict.get("auto_map", {}):
                    processor_auto_map = config_dict["auto_map"]["AutoProcessor"]

        if processor_class is None:
            # Otherwise, load config, if it can be loaded.
            if not isinstance(config, PretrainedConfig):
                config = AutoConfig.from_pretrained(
                    pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
                )

            # And check if the config contains the processor class.
            processor_class = getattr(config, "processor_class", None)
            if hasattr(config, "auto_map") and "AutoProcessor" in config.auto_map:
                processor_auto_map = config.auto_map["AutoProcessor"]

        if processor_class is not None:
            processor_class = processor_class_from_name(processor_class)

        has_remote_code = processor_auto_map is not None
        has_local_code = processor_class is not None or type(config) in PROCESSOR_MAPPING
        trust_remote_code = resolve_trust_remote_code(
            trust_remote_code, pretrained_model_name_or_path, has_local_code, has_remote_code
        )

        if has_remote_code and trust_remote_code:
            processor_class = get_class_from_dynamic_module(
                processor_auto_map, pretrained_model_name_or_path, **kwargs
            )
            _ = kwargs.pop("code_revision", None)
            if os.path.isdir(pretrained_model_name_or_path):
                processor_class.register_for_auto_class()
            return processor_class.from_pretrained(
                pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
            )
        elif processor_class is not None:
            return processor_class.from_pretrained(
                pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
            )
        # Last try: we use the PROCESSOR_MAPPING.
        elif type(config) in PROCESSOR_MAPPING:
            return PROCESSOR_MAPPING[type(config)].from_pretrained(pretrained_model_name_or_path, **kwargs)

        # At this stage, there doesn't seem to be a `Processor` class available for this model, so let's try a
        # tokenizer.
        try:
            return AutoTokenizer.from_pretrained(
                pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
            )
        except Exception:
            try:
                return AutoImageProcessor.from_pretrained(
                    pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
                )
            except Exception:
                pass

            try:
                return AutoFeatureExtractor.from_pretrained(
                    pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
                )
            except Exception:
                pass

        raise ValueError(
            f"Unrecognized processing class in {pretrained_model_name_or_path}. Can't instantiate a processor, a "
            "tokenizer, an image processor or a feature extractor for this model. Make sure the repository contains "
            "the files of at least one of those processing classes."
        )

    @staticmethod
    def register(config_class, processor_class, exist_ok=False):
        """
        Register a new processor for this class.

        Args:
            config_class ([`PretrainedConfig`]):
                The configuration corresponding to the model to register.
            processor_class ([`ProcessorMixin`]): The processor to register.
        """
        PROCESSOR_MAPPING.register(config_class, processor_class, exist_ok=exist_ok)
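

# Usage sketch (illustrative only, not part of the module): registering a custom processor so that
# `AutoProcessor.from_pretrained` can resolve it. `MyConfig` and `MyProcessor` are hypothetical user-defined
# classes, shown purely to illustrate the intended call pattern:
#
#     from transformers import AutoConfig, AutoProcessor
#
#     AutoConfig.register("my-model", MyConfig)
#     AutoProcessor.register(MyConfig, MyProcessor)
#     processor = AutoProcessor.from_pretrained("./path/to/my-model")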