# __init__.py
# Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  14. from typing import TYPE_CHECKING
  15. from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
  16. _import_structure = {
  17. "configuration_lilt": ["LiltConfig"],
  18. }
  19. try:
  20. if not is_torch_available():
  21. raise OptionalDependencyNotAvailable()
  22. except OptionalDependencyNotAvailable:
  23. pass
  24. else:
  25. _import_structure["modeling_lilt"] = [
  26. "LiltForQuestionAnswering",
  27. "LiltForSequenceClassification",
  28. "LiltForTokenClassification",
  29. "LiltModel",
  30. "LiltPreTrainedModel",
  31. ]
  32. if TYPE_CHECKING:
  33. from .configuration_lilt import LiltConfig
  34. try:
  35. if not is_torch_available():
  36. raise OptionalDependencyNotAvailable()
  37. except OptionalDependencyNotAvailable:
  38. pass
  39. else:
  40. from .modeling_lilt import (
  41. LiltForQuestionAnswering,
  42. LiltForSequenceClassification,
  43. LiltForTokenClassification,
  44. LiltModel,
  45. LiltPreTrainedModel,
  46. )
  47. else:
  48. import sys
  49. sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)