  1. # Copyright 2022 The HuggingFace Team. All rights reserved.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. from typing import TYPE_CHECKING
  15. from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available
  16. _import_structure = {
  17. "configuration_markuplm": ["MarkupLMConfig"],
  18. "feature_extraction_markuplm": ["MarkupLMFeatureExtractor"],
  19. "processing_markuplm": ["MarkupLMProcessor"],
  20. "tokenization_markuplm": ["MarkupLMTokenizer"],
  21. }
  22. try:
  23. if not is_tokenizers_available():
  24. raise OptionalDependencyNotAvailable()
  25. except OptionalDependencyNotAvailable:
  26. pass
  27. else:
  28. _import_structure["tokenization_markuplm_fast"] = ["MarkupLMTokenizerFast"]
  29. try:
  30. if not is_torch_available():
  31. raise OptionalDependencyNotAvailable()
  32. except OptionalDependencyNotAvailable:
  33. pass
  34. else:
  35. _import_structure["modeling_markuplm"] = [
  36. "MarkupLMForQuestionAnswering",
  37. "MarkupLMForSequenceClassification",
  38. "MarkupLMForTokenClassification",
  39. "MarkupLMModel",
  40. "MarkupLMPreTrainedModel",
  41. ]
  42. if TYPE_CHECKING:
  43. from .configuration_markuplm import MarkupLMConfig
  44. from .feature_extraction_markuplm import MarkupLMFeatureExtractor
  45. from .processing_markuplm import MarkupLMProcessor
  46. from .tokenization_markuplm import MarkupLMTokenizer
  47. try:
  48. if not is_tokenizers_available():
  49. raise OptionalDependencyNotAvailable()
  50. except OptionalDependencyNotAvailable:
  51. pass
  52. else:
  53. from .tokenization_markuplm_fast import MarkupLMTokenizerFast
  54. try:
  55. if not is_torch_available():
  56. raise OptionalDependencyNotAvailable()
  57. except OptionalDependencyNotAvailable:
  58. pass
  59. else:
  60. from .modeling_markuplm import (
  61. MarkupLMForQuestionAnswering,
  62. MarkupLMForSequenceClassification,
  63. MarkupLMForTokenClassification,
  64. MarkupLMModel,
  65. MarkupLMPreTrainedModel,
  66. )
  67. else:
  68. import sys
  69. sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)