# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


_import_structure = {
    "configuration_openai": ["OpenAIGPTConfig"],
    "tokenization_openai": ["OpenAIGPTTokenizer"],
}

try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_openai_fast"] = ["OpenAIGPTTokenizerFast"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_openai"] = [
        "OpenAIGPTDoubleHeadsModel",
        "OpenAIGPTForSequenceClassification",
        "OpenAIGPTLMHeadModel",
        "OpenAIGPTModel",
        "OpenAIGPTPreTrainedModel",
        "load_tf_weights_in_openai_gpt",
    ]

try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_openai"] = [
        "TFOpenAIGPTDoubleHeadsModel",
        "TFOpenAIGPTForSequenceClassification",
        "TFOpenAIGPTLMHeadModel",
        "TFOpenAIGPTMainLayer",
        "TFOpenAIGPTModel",
        "TFOpenAIGPTPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_openai import OpenAIGPTConfig
    from .tokenization_openai import OpenAIGPTTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_openai_fast import OpenAIGPTTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_openai import (
            OpenAIGPTDoubleHeadsModel,
            OpenAIGPTForSequenceClassification,
            OpenAIGPTLMHeadModel,
            OpenAIGPTModel,
            OpenAIGPTPreTrainedModel,
            load_tf_weights_in_openai_gpt,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_openai import (
            TFOpenAIGPTDoubleHeadsModel,
            TFOpenAIGPTForSequenceClassification,
            TFOpenAIGPTLMHeadModel,
            TFOpenAIGPTMainLayer,
            TFOpenAIGPTModel,
            TFOpenAIGPTPreTrainedModel,
        )

else:
    import sys

    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
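
# A minimal usage sketch (assumption: the package is installed with PyTorch and
# `tokenizers` available, so the guarded entries above get registered in
# `_import_structure`). Because this module is replaced by a `_LazyModule`,
# submodules are only imported when one of their exported names is first accessed:
#
#     from transformers.models.openai import OpenAIGPTConfig, OpenAIGPTModel
#
#     config = OpenAIGPTConfig()      # resolves configuration_openai on first access
#     model = OpenAIGPTModel(config)  # resolves modeling_openai (requires torch)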