  1. """Provide an enhanced dataclass that performs validation."""
  2. from __future__ import annotations as _annotations
  3. import dataclasses
  4. import sys
  5. import types
  6. from typing import TYPE_CHECKING, Any, Callable, Generic, NoReturn, TypeVar, overload
  7. from warnings import warn
  8. from typing_extensions import Literal, TypeGuard, dataclass_transform
  9. from ._internal import _config, _decorators, _namespace_utils, _typing_extra
  10. from ._internal import _dataclasses as _pydantic_dataclasses
  11. from ._migration import getattr_migration
  12. from .config import ConfigDict
  13. from .errors import PydanticUserError
  14. from .fields import Field, FieldInfo, PrivateAttr
  15. if TYPE_CHECKING:
  16. from ._internal._dataclasses import PydanticDataclass
  17. from ._internal._namespace_utils import MappingNamespace
  18. __all__ = 'dataclass', 'rebuild_dataclass'

_T = TypeVar('_T')

if sys.version_info >= (3, 10):

    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
    @overload
    def dataclass(
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool = False,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
        kw_only: bool = ...,
        slots: bool = ...,
    ) -> Callable[[type[_T]], type[PydanticDataclass]]:  # type: ignore
        ...

    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
    @overload
    def dataclass(
        _cls: type[_T],  # type: ignore
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool | None = None,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
        kw_only: bool = ...,
        slots: bool = ...,
    ) -> type[PydanticDataclass]: ...

else:

    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
    @overload
    def dataclass(
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool | None = None,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
    ) -> Callable[[type[_T]], type[PydanticDataclass]]:  # type: ignore
        ...

    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
    @overload
    def dataclass(
        _cls: type[_T],  # type: ignore
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool | None = None,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
    ) -> type[PydanticDataclass]: ...


@dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
def dataclass(
    _cls: type[_T] | None = None,
    *,
    init: Literal[False] = False,
    repr: bool = True,
    eq: bool = True,
    order: bool = False,
    unsafe_hash: bool = False,
    frozen: bool | None = None,
    config: ConfigDict | type[object] | None = None,
    validate_on_init: bool | None = None,
    kw_only: bool = False,
    slots: bool = False,
) -> Callable[[type[_T]], type[PydanticDataclass]] | type[PydanticDataclass]:
  97. """Usage docs: https://docs.pydantic.dev/2.10/concepts/dataclasses/
  98. A decorator used to create a Pydantic-enhanced dataclass, similar to the standard Python `dataclass`,
  99. but with added validation.
  100. This function should be used similarly to `dataclasses.dataclass`.
  101. Args:
  102. _cls: The target `dataclass`.
  103. init: Included for signature compatibility with `dataclasses.dataclass`, and is passed through to
  104. `dataclasses.dataclass` when appropriate. If specified, must be set to `False`, as pydantic inserts its
  105. own `__init__` function.
  106. repr: A boolean indicating whether to include the field in the `__repr__` output.
  107. eq: Determines if a `__eq__` method should be generated for the class.
  108. order: Determines if comparison magic methods should be generated, such as `__lt__`, but not `__eq__`.
  109. unsafe_hash: Determines if a `__hash__` method should be included in the class, as in `dataclasses.dataclass`.
  110. frozen: Determines if the generated class should be a 'frozen' `dataclass`, which does not allow its
  111. attributes to be modified after it has been initialized. If not set, the value from the provided `config` argument will be used (and will default to `False` otherwise).
  112. config: The Pydantic config to use for the `dataclass`.
  113. validate_on_init: A deprecated parameter included for backwards compatibility; in V2, all Pydantic dataclasses
  114. are validated on init.
  115. kw_only: Determines if `__init__` method parameters must be specified by keyword only. Defaults to `False`.
  116. slots: Determines if the generated class should be a 'slots' `dataclass`, which does not allow the addition of
  117. new attributes after instantiation.
  118. Returns:
  119. A decorator that accepts a class as its argument and returns a Pydantic `dataclass`.
  120. Raises:
  121. AssertionError: Raised if `init` is not `False` or `validate_on_init` is `False`.
  122. """
    assert init is False, 'pydantic.dataclasses.dataclass only supports init=False'
    assert validate_on_init is not False, 'validate_on_init=False is no longer supported'

    if sys.version_info >= (3, 10):
        kwargs = {'kw_only': kw_only, 'slots': slots}
    else:
        kwargs = {}

    def make_pydantic_fields_compatible(cls: type[Any]) -> None:
        """Make sure that stdlib `dataclasses` understands `Field` kwargs like `kw_only`.

        To do that, we simply change
          `x: int = pydantic.Field(..., kw_only=True)`
        into
          `x: int = dataclasses.field(default=pydantic.Field(..., kw_only=True), kw_only=True)`
        """
        for annotation_cls in cls.__mro__:
            # In Python < 3.9, `__annotations__` might not be present if there are no fields.
            # We therefore need to use `getattr` to avoid an `AttributeError`.
            annotations = getattr(annotation_cls, '__annotations__', [])
            for field_name in annotations:
                field_value = getattr(cls, field_name, None)
                # Process only if this is an instance of `FieldInfo`.
                if not isinstance(field_value, FieldInfo):
                    continue

                # Initialize arguments for the standard `dataclasses.field`.
                field_args: dict = {'default': field_value}

                # Handle `kw_only` for Python 3.10+.
                if sys.version_info >= (3, 10) and field_value.kw_only:
                    field_args['kw_only'] = True

                # Set the `repr` attribute if it's explicitly specified to be not `True`.
                if field_value.repr is not True:
                    field_args['repr'] = field_value.repr

                setattr(cls, field_name, dataclasses.field(**field_args))
                # In Python 3.8, dataclasses checks cls.__dict__['__annotations__'] for annotations,
                # so we must make sure it's initialized before we add to it.
                if cls.__dict__.get('__annotations__') is None:
                    cls.__annotations__ = {}
                cls.__annotations__[field_name] = annotations[field_name]

    def create_dataclass(cls: type[Any]) -> type[PydanticDataclass]:
        """Create a Pydantic dataclass from a regular dataclass.

        Args:
            cls: The class to create the Pydantic dataclass from.

        Returns:
            A Pydantic dataclass.
        """
        from ._internal._utils import is_model_class

        if is_model_class(cls):
            raise PydanticUserError(
                f'Cannot create a Pydantic dataclass from {cls.__name__} as it is already a Pydantic model',
                code='dataclass-on-model',
            )

        original_cls = cls

        # We warn on conflicting config specifications, but only if the class doesn't have a dataclass base,
        # because a dataclass base might provide a __pydantic_config__ attribute that we don't want to warn about.
        has_dataclass_base = any(dataclasses.is_dataclass(base) for base in cls.__bases__)
        if not has_dataclass_base and config is not None and hasattr(cls, '__pydantic_config__'):
            warn(
                f'`config` is set via both the `dataclass` decorator and `__pydantic_config__` for dataclass {cls.__name__}. '
                f'The `config` specification from the `dataclass` decorator will take priority.',
                category=UserWarning,
                stacklevel=2,
            )

        # If config is not explicitly provided, try to read it from the type.
        config_dict = config if config is not None else getattr(cls, '__pydantic_config__', None)
        config_wrapper = _config.ConfigWrapper(config_dict)
        decorators = _decorators.DecoratorInfos.build(cls)

        # Keep track of the original __doc__ so that we can restore it after applying the dataclasses decorator.
        # Otherwise, classes with no __doc__ will have their signature added into the JSON schema description,
        # since dataclasses.dataclass will set this as the __doc__.
        original_doc = cls.__doc__

        if _pydantic_dataclasses.is_builtin_dataclass(cls):
            # Don't preserve the docstring for vanilla dataclasses, as it may include the signature.
            # This matches v1 behavior, and there was an explicit test for it.
            original_doc = None

            # We don't want to add validation to the existing stdlib dataclass, so we subclass it instead.
            # If the class is generic, we need to make sure the subclass also inherits from Generic
            # with all the same parameters.
            bases = (cls,)
            if issubclass(cls, Generic):
                generic_base = Generic[cls.__parameters__]  # type: ignore
                bases = bases + (generic_base,)
            cls = types.new_class(cls.__name__, bases)

        make_pydantic_fields_compatible(cls)

        # Respect the frozen setting from the dataclass constructor and fall back to the config setting if not provided.
        if frozen is not None:
            frozen_ = frozen
            if config_wrapper.frozen:
                # It's not recommended to define both, as the setting from the dataclass decorator will take priority.
                warn(
                    f'`frozen` is set via both the `dataclass` decorator and `config` for dataclass {cls.__name__!r}. '
                    'This is not recommended. The `frozen` specification on `dataclass` will take priority.',
                    category=UserWarning,
                    stacklevel=2,
                )
        else:
            frozen_ = config_wrapper.frozen or False

        cls = dataclasses.dataclass(  # type: ignore[call-overload]
            cls,
            # The value of `init` here doesn't affect anything except that it makes it easier to generate a signature.
            init=True,
            repr=repr,
            eq=eq,
            order=order,
            unsafe_hash=unsafe_hash,
            frozen=frozen_,
            **kwargs,
        )

        cls.__pydantic_decorators__ = decorators  # type: ignore
        cls.__doc__ = original_doc
        cls.__module__ = original_cls.__module__
        cls.__qualname__ = original_cls.__qualname__
        cls.__pydantic_complete__ = False  # `complete_dataclass` will set it to `True` if successful.

        # TODO `parent_namespace` is currently None, but we could do the same thing as Pydantic models:
        # fetch the parent ns using `parent_frame_namespace` (if the dataclass was defined in a function),
        # and possibly cache it (see the `__pydantic_parent_namespace__` logic for models).
        _pydantic_dataclasses.complete_dataclass(cls, config_wrapper, raise_errors=False)
        return cls

    return create_dataclass if _cls is None else create_dataclass(_cls)
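

# Illustrative sketch (not part of the original module): a minimal, hypothetical example of how the
# `dataclass` decorator defined above is typically applied. `_ExampleUser` and its fields are invented
# for demonstration; the example lives in a function so nothing runs at import time.
def _example_dataclass_usage() -> None:
    @dataclass
    class _ExampleUser:
        id: int
        name: str = 'John Doe'

    user = _ExampleUser(id=123)
    assert user.id == 123 and user.name == 'John Doe'

    # Unlike a stdlib dataclass, invalid input is rejected at construction time:
    # e.g. `_ExampleUser(id=[1, 2])` raises `pydantic.ValidationError`.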


__getattr__ = getattr_migration(__name__)

if (3, 8) <= sys.version_info < (3, 11):
    # Monkeypatch dataclasses.InitVar so that typing doesn't error if it occurs as a type when evaluating type hints.
    # Starting in 3.11, typing.get_type_hints will not raise an error if the retrieved type hints are not callable.

    def _call_initvar(*args: Any, **kwargs: Any) -> NoReturn:
        """This function does nothing but raise an error that is as similar as possible to what you'd get
        if you were to try calling `InitVar[int]()` without this monkeypatch. The whole purpose is just
        to ensure typing._type_check does not error if the type hint evaluates to `InitVar[<parameter>]`.
        """
        raise TypeError("'InitVar' object is not callable")

    dataclasses.InitVar.__call__ = _call_initvar


def rebuild_dataclass(
    cls: type[PydanticDataclass],
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: MappingNamespace | None = None,
) -> bool | None:
    """Try to rebuild the pydantic-core schema for the dataclass.

    This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
    the initial attempt to build the schema, and automatic rebuilding fails.

    This is analogous to `BaseModel.model_rebuild`.

    Args:
        cls: The class to rebuild the pydantic-core schema for.
        force: Whether to force the rebuilding of the schema, defaults to `False`.
        raise_errors: Whether to raise errors, defaults to `True`.
        _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
        _types_namespace: The types namespace, defaults to `None`.

    Returns:
        Returns `None` if the schema is already "complete" and rebuilding was not required.
        If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
    """
    if not force and cls.__pydantic_complete__:
        return None

    if '__pydantic_core_schema__' in cls.__dict__:
        delattr(cls, '__pydantic_core_schema__')  # delete cached value to ensure full rebuild happens

    if _types_namespace is not None:
        rebuild_ns = _types_namespace
    elif _parent_namespace_depth > 0:
        rebuild_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth, force=True) or {}
    else:
        rebuild_ns = {}

    ns_resolver = _namespace_utils.NsResolver(
        parent_namespace=rebuild_ns,
    )

    return _pydantic_dataclasses.complete_dataclass(
        cls,
        _config.ConfigWrapper(cls.__pydantic_config__, check=False),
        raise_errors=raise_errors,
        ns_resolver=ns_resolver,
        # We could provide a different config (with `'defer_build'` set to `True`) instead of this explicit
        # `_force_build` argument, but because config can come from the decorator parameter or the
        # `__pydantic_config__` attribute, `complete_dataclass` will overwrite `__pydantic_config__`
        # with the provided config above:
        _force_build=True,
    )
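

# Illustrative sketch (not part of the original module): a hypothetical use of `rebuild_dataclass` when a
# forward reference cannot be resolved at decoration time. `_Outer` and `_Inner` are invented names, and the
# private `_types_namespace` parameter is used here only because the classes are defined inside a function,
# where the default parent-namespace lookup would not find `_Inner`.
def _example_rebuild_dataclass() -> None:
    @dataclass
    class _Outer:
        inner: '_Inner'  # not yet defined, so the initial schema build stays incomplete

    @dataclass
    class _Inner:
        x: int

    # Now that `_Inner` exists, the schema for `_Outer` can be completed; `True` indicates success.
    rebuild_dataclass(_Outer, _types_namespace={'_Inner': _Inner})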


def is_pydantic_dataclass(class_: type[Any], /) -> TypeGuard[type[PydanticDataclass]]:
    """Whether a class is a pydantic dataclass.

    Args:
        class_: The class.

    Returns:
        `True` if the class is a pydantic dataclass, `False` otherwise.
    """
    try:
        return '__pydantic_validator__' in class_.__dict__ and dataclasses.is_dataclass(class_)
    except AttributeError:
        return False
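

# Illustrative sketch (not part of the original module): a hypothetical comparison showing that only classes
# decorated with the Pydantic `dataclass` decorator above are recognized by `is_pydantic_dataclass`,
# while plain stdlib dataclasses are not.
def _example_is_pydantic_dataclass() -> None:
    @dataclasses.dataclass
    class _StdLibPoint:
        x: int
        y: int

    @dataclass
    class _PydanticPoint:
        x: int
        y: int

    assert not is_pydantic_dataclass(_StdLibPoint)
    assert is_pydantic_dataclass(_PydanticPoint)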