Diffstat (limited to '.venv/lib/python3.12/site-packages/pydantic/_internal')
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/__init__.py                  |    0
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_config.py                   |  345
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_core_metadata.py            |   91
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_core_utils.py               |  610
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_dataclasses.py              |  246
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_decorators.py               |  823
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_decorators_v1.py            |  174
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_discriminated_union.py      |  503
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_docs_extraction.py          |  108
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_fields.py                   |  392
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_forward_ref.py              |   23
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_generate_schema.py          | 2522
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_generics.py                 |  536
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_git.py                      |   27
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_import_utils.py             |   20
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_internal_dataclass.py       |    7
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_known_annotated_metadata.py |  392
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_mock_val_ser.py             |  235
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_model_construction.py       |  792
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_namespace_utils.py          |  284
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_repr.py                     |  123
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_schema_generation_shared.py |  126
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_serializers.py              |   51
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_signature.py                |  188
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_std_types_schema.py         |  404
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_typing_extra.py             |  893
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_utils.py                    |  389
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_validate_call.py            |  115
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/_internal/_validators.py               |  424
29 files changed, 10843 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/__init__.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_config.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_config.py
new file mode 100644
index 00000000..6d491c29
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_config.py
@@ -0,0 +1,345 @@
+from __future__ import annotations as _annotations
+
+import warnings
+from contextlib import contextmanager
+from re import Pattern
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ cast,
+)
+
+from pydantic_core import core_schema
+from typing_extensions import (
+ Literal,
+ Self,
+)
+
+from ..aliases import AliasGenerator
+from ..config import ConfigDict, ExtraValues, JsonDict, JsonEncoder, JsonSchemaExtraCallable
+from ..errors import PydanticUserError
+from ..warnings import PydanticDeprecatedSince20, PydanticDeprecatedSince210
+
+if not TYPE_CHECKING:
+ # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+ # and https://youtrack.jetbrains.com/issue/PY-51428
+ DeprecationWarning = PydanticDeprecatedSince20
+
+if TYPE_CHECKING:
+ from .._internal._schema_generation_shared import GenerateSchema
+ from ..fields import ComputedFieldInfo, FieldInfo
+
+DEPRECATION_MESSAGE = 'Support for class-based `config` is deprecated, use ConfigDict instead.'
+
+
+class ConfigWrapper:
+ """Internal wrapper for Config which exposes ConfigDict items as attributes."""
+
+ __slots__ = ('config_dict',)
+
+ config_dict: ConfigDict
+
+ # all annotations are copied directly from ConfigDict, and should be kept up to date, a test will fail if they
+ # stop matching
+ title: str | None
+ str_to_lower: bool
+ str_to_upper: bool
+ str_strip_whitespace: bool
+ str_min_length: int
+ str_max_length: int | None
+ extra: ExtraValues | None
+ frozen: bool
+ populate_by_name: bool
+ use_enum_values: bool
+ validate_assignment: bool
+ arbitrary_types_allowed: bool
+ from_attributes: bool
+ # whether to use the actual key provided in the data (e.g. alias or first alias for "field required" errors) instead of field_names
+ # to construct error `loc`s, default `True`
+ loc_by_alias: bool
+ alias_generator: Callable[[str], str] | AliasGenerator | None
+ model_title_generator: Callable[[type], str] | None
+ field_title_generator: Callable[[str, FieldInfo | ComputedFieldInfo], str] | None
+ ignored_types: tuple[type, ...]
+ allow_inf_nan: bool
+ json_schema_extra: JsonDict | JsonSchemaExtraCallable | None
+ json_encoders: dict[type[object], JsonEncoder] | None
+
+ # new in V2
+ strict: bool
+ # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never'
+ revalidate_instances: Literal['always', 'never', 'subclass-instances']
+ ser_json_timedelta: Literal['iso8601', 'float']
+ ser_json_bytes: Literal['utf8', 'base64', 'hex']
+ val_json_bytes: Literal['utf8', 'base64', 'hex']
+ ser_json_inf_nan: Literal['null', 'constants', 'strings']
+ # whether to validate default values during validation, default False
+ validate_default: bool
+ validate_return: bool
+ protected_namespaces: tuple[str | Pattern[str], ...]
+ hide_input_in_errors: bool
+ defer_build: bool
+ plugin_settings: dict[str, object] | None
+ schema_generator: type[GenerateSchema] | None
+ json_schema_serialization_defaults_required: bool
+ json_schema_mode_override: Literal['validation', 'serialization', None]
+ coerce_numbers_to_str: bool
+ regex_engine: Literal['rust-regex', 'python-re']
+ validation_error_cause: bool
+ use_attribute_docstrings: bool
+ cache_strings: bool | Literal['all', 'keys', 'none']
+
+ def __init__(self, config: ConfigDict | dict[str, Any] | type[Any] | None, *, check: bool = True):
+ if check:
+ self.config_dict = prepare_config(config)
+ else:
+ self.config_dict = cast(ConfigDict, config)
+
+ @classmethod
+ def for_model(cls, bases: tuple[type[Any], ...], namespace: dict[str, Any], kwargs: dict[str, Any]) -> Self:
+ """Build a new `ConfigWrapper` instance for a `BaseModel`.
+
+        The config wrapper is built based on (in descending order of priority):
+ - options from `kwargs`
+ - options from the `namespace`
+ - options from the base classes (`bases`)
+
+ Args:
+ bases: A tuple of base classes.
+ namespace: The namespace of the class being created.
+ kwargs: The kwargs passed to the class being created.
+
+ Returns:
+ A `ConfigWrapper` instance for `BaseModel`.
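+
+        Example (illustrative sketch, assuming `BaseModel` and `ConfigDict` are imported from `pydantic`):
+            ```python
+            class Parent(BaseModel):
+                model_config = ConfigDict(frozen=True, extra='ignore')
+
+            class Child(Parent, extra='forbid'):
+                model_config = ConfigDict(str_to_lower=True)
+
+            # Resulting config: frozen=True (from Parent), str_to_lower=True (from the
+            # namespace), extra='forbid' (class kwargs take the highest priority).
+            ```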
+ """
+ config_new = ConfigDict()
+ for base in bases:
+ config = getattr(base, 'model_config', None)
+ if config:
+ config_new.update(config.copy())
+
+ config_class_from_namespace = namespace.get('Config')
+ config_dict_from_namespace = namespace.get('model_config')
+
+ raw_annotations = namespace.get('__annotations__', {})
+ if raw_annotations.get('model_config') and config_dict_from_namespace is None:
+ raise PydanticUserError(
+ '`model_config` cannot be used as a model field name. Use `model_config` for model configuration.',
+ code='model-config-invalid-field-name',
+ )
+
+ if config_class_from_namespace and config_dict_from_namespace:
+ raise PydanticUserError('"Config" and "model_config" cannot be used together', code='config-both')
+
+ config_from_namespace = config_dict_from_namespace or prepare_config(config_class_from_namespace)
+
+ config_new.update(config_from_namespace)
+
+ for k in list(kwargs.keys()):
+ if k in config_keys:
+ config_new[k] = kwargs.pop(k)
+
+ return cls(config_new)
+
+ # we don't show `__getattr__` to type checkers so missing attributes cause errors
+ if not TYPE_CHECKING: # pragma: no branch
+
+ def __getattr__(self, name: str) -> Any:
+ try:
+ return self.config_dict[name]
+ except KeyError:
+ try:
+ return config_defaults[name]
+ except KeyError:
+ raise AttributeError(f'Config has no attribute {name!r}') from None
+
+ def core_config(self, title: str | None) -> core_schema.CoreConfig:
+ """Create a pydantic-core config.
+
+ We don't use getattr here since we don't want to populate with defaults.
+
+ Args:
+ title: The title to use if not set in config.
+
+ Returns:
+ A `CoreConfig` object created from config.
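+
+        Example (illustrative; options left unset are omitted rather than filled with defaults):
+            ```python
+            ConfigWrapper({'strict': True}).core_config(title='MyModel')
+            #> {'title': 'MyModel', 'strict': True}
+            ```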
+ """
+ config = self.config_dict
+
+ if config.get('schema_generator') is not None:
+ warnings.warn(
+ 'The `schema_generator` setting has been deprecated since v2.10. This setting no longer has any effect.',
+ PydanticDeprecatedSince210,
+ stacklevel=2,
+ )
+
+ core_config_values = {
+ 'title': config.get('title') or title or None,
+ 'extra_fields_behavior': config.get('extra'),
+ 'allow_inf_nan': config.get('allow_inf_nan'),
+ 'populate_by_name': config.get('populate_by_name'),
+ 'str_strip_whitespace': config.get('str_strip_whitespace'),
+ 'str_to_lower': config.get('str_to_lower'),
+ 'str_to_upper': config.get('str_to_upper'),
+ 'strict': config.get('strict'),
+ 'ser_json_timedelta': config.get('ser_json_timedelta'),
+ 'ser_json_bytes': config.get('ser_json_bytes'),
+ 'val_json_bytes': config.get('val_json_bytes'),
+ 'ser_json_inf_nan': config.get('ser_json_inf_nan'),
+ 'from_attributes': config.get('from_attributes'),
+ 'loc_by_alias': config.get('loc_by_alias'),
+ 'revalidate_instances': config.get('revalidate_instances'),
+ 'validate_default': config.get('validate_default'),
+ 'str_max_length': config.get('str_max_length'),
+ 'str_min_length': config.get('str_min_length'),
+ 'hide_input_in_errors': config.get('hide_input_in_errors'),
+ 'coerce_numbers_to_str': config.get('coerce_numbers_to_str'),
+ 'regex_engine': config.get('regex_engine'),
+ 'validation_error_cause': config.get('validation_error_cause'),
+ 'cache_strings': config.get('cache_strings'),
+ }
+
+ return core_schema.CoreConfig(**{k: v for k, v in core_config_values.items() if v is not None})
+
+ def __repr__(self):
+ c = ', '.join(f'{k}={v!r}' for k, v in self.config_dict.items())
+ return f'ConfigWrapper({c})'
+
+
+class ConfigWrapperStack:
+ """A stack of `ConfigWrapper` instances."""
+
+ def __init__(self, config_wrapper: ConfigWrapper):
+ self._config_wrapper_stack: list[ConfigWrapper] = [config_wrapper]
+
+ @property
+ def tail(self) -> ConfigWrapper:
+ return self._config_wrapper_stack[-1]
+
+ @contextmanager
+ def push(self, config_wrapper: ConfigWrapper | ConfigDict | None):
+ if config_wrapper is None:
+ yield
+ return
+
+ if not isinstance(config_wrapper, ConfigWrapper):
+ config_wrapper = ConfigWrapper(config_wrapper, check=False)
+
+ self._config_wrapper_stack.append(config_wrapper)
+ try:
+ yield
+ finally:
+ self._config_wrapper_stack.pop()
+
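+# Usage sketch (illustrative): `push` temporarily overrides the active config wrapper
+# during nested schema generation and restores the previous one on exit:
+#
+#     stack = ConfigWrapperStack(ConfigWrapper(None))
+#     with stack.push(ConfigDict(strict=True)):
+#         assert stack.tail.strict is True
+#     assert stack.tail.strict is False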
+
+config_defaults = ConfigDict(
+ title=None,
+ str_to_lower=False,
+ str_to_upper=False,
+ str_strip_whitespace=False,
+ str_min_length=0,
+ str_max_length=None,
+ # let the model / dataclass decide how to handle it
+ extra=None,
+ frozen=False,
+ populate_by_name=False,
+ use_enum_values=False,
+ validate_assignment=False,
+ arbitrary_types_allowed=False,
+ from_attributes=False,
+ loc_by_alias=True,
+ alias_generator=None,
+ model_title_generator=None,
+ field_title_generator=None,
+ ignored_types=(),
+ allow_inf_nan=True,
+ json_schema_extra=None,
+ strict=False,
+ revalidate_instances='never',
+ ser_json_timedelta='iso8601',
+ ser_json_bytes='utf8',
+ val_json_bytes='utf8',
+ ser_json_inf_nan='null',
+ validate_default=False,
+ validate_return=False,
+ protected_namespaces=('model_validate', 'model_dump'),
+ hide_input_in_errors=False,
+ json_encoders=None,
+ defer_build=False,
+ schema_generator=None,
+ plugin_settings=None,
+ json_schema_serialization_defaults_required=False,
+ json_schema_mode_override=None,
+ coerce_numbers_to_str=False,
+ regex_engine='rust-regex',
+ validation_error_cause=False,
+ use_attribute_docstrings=False,
+ cache_strings=True,
+)
+
+
+def prepare_config(config: ConfigDict | dict[str, Any] | type[Any] | None) -> ConfigDict:
+ """Create a `ConfigDict` instance from an existing dict, a class (e.g. old class-based config) or None.
+
+ Args:
+ config: The input config.
+
+ Returns:
+ A ConfigDict object created from config.
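+
+    Example (sketch of the deprecated class-based path):
+        ```python
+        class Config:  # old V1-style configuration
+            str_to_lower = True
+
+        prepare_config(Config)  # emits a deprecation warning
+        #> {'str_to_lower': True}
+        ```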
+ """
+ if config is None:
+ return ConfigDict()
+
+ if not isinstance(config, dict):
+ warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning)
+ config = {k: getattr(config, k) for k in dir(config) if not k.startswith('__')}
+
+ config_dict = cast(ConfigDict, config)
+ check_deprecated(config_dict)
+ return config_dict
+
+
+config_keys = set(ConfigDict.__annotations__.keys())
+
+
+V2_REMOVED_KEYS = {
+ 'allow_mutation',
+ 'error_msg_templates',
+ 'fields',
+ 'getter_dict',
+ 'smart_union',
+ 'underscore_attrs_are_private',
+ 'json_loads',
+ 'json_dumps',
+ 'copy_on_model_validation',
+ 'post_init_call',
+}
+V2_RENAMED_KEYS = {
+ 'allow_population_by_field_name': 'populate_by_name',
+ 'anystr_lower': 'str_to_lower',
+ 'anystr_strip_whitespace': 'str_strip_whitespace',
+ 'anystr_upper': 'str_to_upper',
+ 'keep_untouched': 'ignored_types',
+ 'max_anystr_length': 'str_max_length',
+ 'min_anystr_length': 'str_min_length',
+ 'orm_mode': 'from_attributes',
+ 'schema_extra': 'json_schema_extra',
+ 'validate_all': 'validate_default',
+}
+
+
+def check_deprecated(config_dict: ConfigDict) -> None:
+ """Check for deprecated config keys and warn the user.
+
+ Args:
+ config_dict: The input config.
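+
+    For example (illustrative), `check_deprecated({'orm_mode': True})` emits a `UserWarning`
+    whose message includes `* 'orm_mode' has been renamed to 'from_attributes'`.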
+ """
+ deprecated_removed_keys = V2_REMOVED_KEYS & config_dict.keys()
+ deprecated_renamed_keys = V2_RENAMED_KEYS.keys() & config_dict.keys()
+ if deprecated_removed_keys or deprecated_renamed_keys:
+ renamings = {k: V2_RENAMED_KEYS[k] for k in sorted(deprecated_renamed_keys)}
+ renamed_bullets = [f'* {k!r} has been renamed to {v!r}' for k, v in renamings.items()]
+ removed_bullets = [f'* {k!r} has been removed' for k in sorted(deprecated_removed_keys)]
+ message = '\n'.join(['Valid config keys have changed in V2:'] + renamed_bullets + removed_bullets)
+ warnings.warn(message, UserWarning)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_core_metadata.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_core_metadata.py
new file mode 100644
index 00000000..89e3e788
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_core_metadata.py
@@ -0,0 +1,91 @@
+from __future__ import annotations as _annotations
+
+from typing import TYPE_CHECKING, Any, TypedDict, cast
+from warnings import warn
+
+if TYPE_CHECKING:
+ from ..config import JsonDict, JsonSchemaExtraCallable
+ from ._schema_generation_shared import (
+ GetJsonSchemaFunction,
+ )
+
+
+class CoreMetadata(TypedDict, total=False):
+ """A `TypedDict` for holding the metadata dict of the schema.
+
+ Attributes:
+ pydantic_js_functions: List of JSON schema functions that resolve refs during application.
+ pydantic_js_annotation_functions: List of JSON schema functions that don't resolve refs during application.
+        pydantic_js_prefer_positional_arguments: Whether JSON schema generator will
+            prefer positional over keyword arguments for an 'arguments' schema.
+        pydantic_js_updates: key / value pair updates to apply to the JSON schema for a type.
+ pydantic_js_extra: WIP, either key/value pair updates to apply to the JSON schema, or a custom callable.
+
+ TODO: Perhaps we should move this structure to pydantic-core. At the moment, though,
+ it's easier to iterate on if we leave it in pydantic until we feel there is a semi-stable API.
+
+ TODO: It's unfortunate how functionally oriented JSON schema generation is, especially that which occurs during
+ the core schema generation process. It's inevitable that we need to store some json schema related information
+ on core schemas, given that we generate JSON schemas directly from core schemas. That being said, debugging related
+ issues is quite difficult when JSON schema information is disguised via dynamically defined functions.
+ """
+
+ pydantic_js_functions: list[GetJsonSchemaFunction]
+ pydantic_js_annotation_functions: list[GetJsonSchemaFunction]
+ pydantic_js_prefer_positional_arguments: bool
+ pydantic_js_updates: JsonDict
+ pydantic_js_extra: JsonDict | JsonSchemaExtraCallable
+
+
+def update_core_metadata(
+ core_metadata: Any,
+ /,
+ *,
+ pydantic_js_functions: list[GetJsonSchemaFunction] | None = None,
+ pydantic_js_annotation_functions: list[GetJsonSchemaFunction] | None = None,
+ pydantic_js_updates: JsonDict | None = None,
+ pydantic_js_extra: JsonDict | JsonSchemaExtraCallable | None = None,
+) -> None:
+ """Update CoreMetadata instance in place. When we make modifications in this function, they
+ take effect on the `core_metadata` reference passed in as the first (and only) positional argument.
+
+ First, cast to `CoreMetadata`, then finish with a cast to `dict[str, Any]` for core schema compatibility.
+ We do this here, instead of before / after each call to this function so that this typing hack
+ can be easily removed if/when we move `CoreMetadata` to `pydantic-core`.
+
+ For parameter descriptions, see `CoreMetadata` above.
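+
+    Example (illustrative; repeated `pydantic_js_updates` are shallow-merged):
+        ```python
+        metadata: dict[str, Any] = {}
+        update_core_metadata(metadata, pydantic_js_updates={'title': 'A'})
+        update_core_metadata(metadata, pydantic_js_updates={'examples': [1]})
+        assert metadata == {'pydantic_js_updates': {'title': 'A', 'examples': [1]}}
+        ```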
+ """
+ core_metadata = cast(CoreMetadata, core_metadata)
+
+ if pydantic_js_functions:
+ core_metadata.setdefault('pydantic_js_functions', []).extend(pydantic_js_functions)
+
+ if pydantic_js_annotation_functions:
+ core_metadata.setdefault('pydantic_js_annotation_functions', []).extend(pydantic_js_annotation_functions)
+
+ if pydantic_js_updates:
+ if (existing_updates := core_metadata.get('pydantic_js_updates')) is not None:
+ core_metadata['pydantic_js_updates'] = {**existing_updates, **pydantic_js_updates}
+ else:
+ core_metadata['pydantic_js_updates'] = pydantic_js_updates
+
+ if pydantic_js_extra is not None:
+ existing_pydantic_js_extra = core_metadata.get('pydantic_js_extra')
+ if existing_pydantic_js_extra is None:
+ core_metadata['pydantic_js_extra'] = pydantic_js_extra
+ if isinstance(existing_pydantic_js_extra, dict):
+ if isinstance(pydantic_js_extra, dict):
+ core_metadata['pydantic_js_extra'] = {**existing_pydantic_js_extra, **pydantic_js_extra}
+ if callable(pydantic_js_extra):
+ warn(
+                    'Composing `dict` and `callable` type `json_schema_extra` is not supported. '
+                    'The `callable` type is being ignored. '
+                    "If you'd like support for this behavior, please open an issue on pydantic.",
+ PydanticJsonSchemaWarning,
+ )
+ if callable(existing_pydantic_js_extra):
+ # if ever there's a case of a callable, we'll just keep the last json schema extra spec
+ core_metadata['pydantic_js_extra'] = pydantic_js_extra
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_core_utils.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_core_utils.py
new file mode 100644
index 00000000..f6ab20e4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_core_utils.py
@@ -0,0 +1,610 @@
+from __future__ import annotations
+
+import os
+from collections import defaultdict
+from typing import Any, Callable, Hashable, TypeVar, Union
+
+from pydantic_core import CoreSchema, core_schema
+from pydantic_core import validate_core_schema as _validate_core_schema
+from typing_extensions import TypeGuard, get_args, get_origin
+
+from ..errors import PydanticUserError
+from . import _repr
+from ._core_metadata import CoreMetadata
+from ._typing_extra import is_generic_alias, is_type_alias_type
+
+AnyFunctionSchema = Union[
+ core_schema.AfterValidatorFunctionSchema,
+ core_schema.BeforeValidatorFunctionSchema,
+ core_schema.WrapValidatorFunctionSchema,
+ core_schema.PlainValidatorFunctionSchema,
+]
+
+
+FunctionSchemaWithInnerSchema = Union[
+ core_schema.AfterValidatorFunctionSchema,
+ core_schema.BeforeValidatorFunctionSchema,
+ core_schema.WrapValidatorFunctionSchema,
+]
+
+CoreSchemaField = Union[
+ core_schema.ModelField, core_schema.DataclassField, core_schema.TypedDictField, core_schema.ComputedField
+]
+CoreSchemaOrField = Union[core_schema.CoreSchema, CoreSchemaField]
+
+_CORE_SCHEMA_FIELD_TYPES = {'typed-dict-field', 'dataclass-field', 'model-field', 'computed-field'}
+_FUNCTION_WITH_INNER_SCHEMA_TYPES = {'function-before', 'function-after', 'function-wrap'}
+_LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES = {'list', 'set', 'frozenset'}
+
+TAGGED_UNION_TAG_KEY = 'pydantic.internal.tagged_union_tag'
+"""
+Used in a `Tag` schema to specify the tag used for a discriminated union.
+"""
+
+
+def is_core_schema(
+ schema: CoreSchemaOrField,
+) -> TypeGuard[CoreSchema]:
+ return schema['type'] not in _CORE_SCHEMA_FIELD_TYPES
+
+
+def is_core_schema_field(
+ schema: CoreSchemaOrField,
+) -> TypeGuard[CoreSchemaField]:
+ return schema['type'] in _CORE_SCHEMA_FIELD_TYPES
+
+
+def is_function_with_inner_schema(
+ schema: CoreSchemaOrField,
+) -> TypeGuard[FunctionSchemaWithInnerSchema]:
+ return schema['type'] in _FUNCTION_WITH_INNER_SCHEMA_TYPES
+
+
+def is_list_like_schema_with_items_schema(
+ schema: CoreSchema,
+) -> TypeGuard[core_schema.ListSchema | core_schema.SetSchema | core_schema.FrozenSetSchema]:
+ return schema['type'] in _LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES
+
+
+def get_type_ref(type_: type[Any], args_override: tuple[type[Any], ...] | None = None) -> str:
+ """Produces the ref to be used for this type by pydantic_core's core schemas.
+
+ This `args_override` argument was added for the purpose of creating valid recursive references
+ when creating generic models without needing to create a concrete class.
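+
+    For example (illustrative; the `id(...)` components vary per process), `get_type_ref(dict[str, int])`
+    produces something like `'builtins.dict:9479776[str:9488960,int:9484336]'`.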
+ """
+ origin = get_origin(type_) or type_
+
+ args = get_args(type_) if is_generic_alias(type_) else (args_override or ())
+ generic_metadata = getattr(type_, '__pydantic_generic_metadata__', None)
+ if generic_metadata:
+ origin = generic_metadata['origin'] or origin
+ args = generic_metadata['args'] or args
+
+ module_name = getattr(origin, '__module__', '<No __module__>')
+ if is_type_alias_type(origin):
+ type_ref = f'{module_name}.{origin.__name__}:{id(origin)}'
+ else:
+ try:
+ qualname = getattr(origin, '__qualname__', f'<No __qualname__: {origin}>')
+ except Exception:
+ qualname = getattr(origin, '__qualname__', '<No __qualname__>')
+ type_ref = f'{module_name}.{qualname}:{id(origin)}'
+
+ arg_refs: list[str] = []
+ for arg in args:
+ if isinstance(arg, str):
+ # Handle string literals as a special case; we may be able to remove this special handling if we
+ # wrap them in a ForwardRef at some point.
+ arg_ref = f'{arg}:str-{id(arg)}'
+ else:
+ arg_ref = f'{_repr.display_as_type(arg)}:{id(arg)}'
+ arg_refs.append(arg_ref)
+ if arg_refs:
+ type_ref = f'{type_ref}[{",".join(arg_refs)}]'
+ return type_ref
+
+
+def get_ref(s: core_schema.CoreSchema) -> None | str:
+ """Get the ref from the schema if it has one.
+ This exists just for type checking to work correctly.
+ """
+ return s.get('ref', None)
+
+
+def collect_definitions(schema: core_schema.CoreSchema) -> dict[str, core_schema.CoreSchema]:
+ defs: dict[str, CoreSchema] = {}
+
+ def _record_valid_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+ ref = get_ref(s)
+ if ref:
+ defs[ref] = s
+ return recurse(s, _record_valid_refs)
+
+ walk_core_schema(schema, _record_valid_refs, copy=False)
+
+ return defs
+
+
+def define_expected_missing_refs(
+ schema: core_schema.CoreSchema, allowed_missing_refs: set[str]
+) -> core_schema.CoreSchema | None:
+ if not allowed_missing_refs:
+ # in this case, there are no missing refs to potentially substitute, so there's no need to walk the schema
+ # this is a common case (will be hit for all non-generic models), so it's worth optimizing for
+ return None
+
+ refs = collect_definitions(schema).keys()
+
+ expected_missing_refs = allowed_missing_refs.difference(refs)
+ if expected_missing_refs:
+ definitions: list[core_schema.CoreSchema] = [
+ core_schema.invalid_schema(ref=ref) for ref in expected_missing_refs
+ ]
+ return core_schema.definitions_schema(schema, definitions)
+ return None
+
+
+def collect_invalid_schemas(schema: core_schema.CoreSchema) -> bool:
+ invalid = False
+
+ def _is_schema_valid(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+ nonlocal invalid
+
+ if s['type'] == 'invalid':
+ invalid = True
+ return s
+
+ return recurse(s, _is_schema_valid)
+
+ walk_core_schema(schema, _is_schema_valid, copy=False)
+ return invalid
+
+
+T = TypeVar('T')
+
+
+Recurse = Callable[[core_schema.CoreSchema, 'Walk'], core_schema.CoreSchema]
+Walk = Callable[[core_schema.CoreSchema, Recurse], core_schema.CoreSchema]
+
+# TODO: Should we move _WalkCoreSchema into pydantic_core proper?
+# Issue: https://github.com/pydantic/pydantic-core/issues/615
+
+CoreSchemaT = TypeVar('CoreSchemaT')
+
+
+class _WalkCoreSchema:
+ def __init__(self, *, copy: bool = True):
+ self._schema_type_to_method = self._build_schema_type_to_method()
+ self._copy = copy
+
+ def _copy_schema(self, schema: CoreSchemaT) -> CoreSchemaT:
+ return schema.copy() if self._copy else schema # pyright: ignore[reportAttributeAccessIssue]
+
+ def _build_schema_type_to_method(self) -> dict[core_schema.CoreSchemaType, Recurse]:
+ mapping: dict[core_schema.CoreSchemaType, Recurse] = {}
+ key: core_schema.CoreSchemaType
+ for key in get_args(core_schema.CoreSchemaType):
+ method_name = f"handle_{key.replace('-', '_')}_schema"
+ mapping[key] = getattr(self, method_name, self._handle_other_schemas)
+ return mapping
+
+ def walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
+ return f(schema, self._walk)
+
+ def _walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
+ schema = self._schema_type_to_method[schema['type']](self._copy_schema(schema), f)
+ ser_schema: core_schema.SerSchema | None = schema.get('serialization') # type: ignore
+ if ser_schema:
+ schema['serialization'] = self._handle_ser_schemas(ser_schema, f)
+ return schema
+
+ def _handle_other_schemas(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
+ sub_schema = schema.get('schema', None)
+ if sub_schema is not None:
+ schema['schema'] = self.walk(sub_schema, f) # type: ignore
+ return schema
+
+ def _handle_ser_schemas(self, ser_schema: core_schema.SerSchema, f: Walk) -> core_schema.SerSchema:
+ schema: core_schema.CoreSchema | None = ser_schema.get('schema', None)
+ return_schema: core_schema.CoreSchema | None = ser_schema.get('return_schema', None)
+ if schema is not None or return_schema is not None:
+ ser_schema = self._copy_schema(ser_schema)
+ if schema is not None:
+ ser_schema['schema'] = self.walk(schema, f) # type: ignore
+ if return_schema is not None:
+ ser_schema['return_schema'] = self.walk(return_schema, f) # type: ignore
+ return ser_schema
+
+ def handle_definitions_schema(self, schema: core_schema.DefinitionsSchema, f: Walk) -> core_schema.CoreSchema:
+ new_definitions: list[core_schema.CoreSchema] = []
+ for definition in schema['definitions']:
+ if 'schema_ref' in definition and 'ref' in definition:
+ # This indicates a purposely indirect reference
+ # We want to keep such references around for implications related to JSON schema, etc.:
+ new_definitions.append(definition)
+ # However, we still need to walk the referenced definition:
+ self.walk(definition, f)
+ continue
+
+ updated_definition = self.walk(definition, f)
+ if 'ref' in updated_definition:
+ # If the updated definition schema doesn't have a 'ref', it shouldn't go in the definitions
+ # This is most likely to happen due to replacing something with a definition reference, in
+ # which case it should certainly not go in the definitions list
+ new_definitions.append(updated_definition)
+ new_inner_schema = self.walk(schema['schema'], f)
+
+ if not new_definitions and len(schema) == 3:
+ # This means we'd be returning a "trivial" definitions schema that just wrapped the inner schema
+ return new_inner_schema
+
+ new_schema = self._copy_schema(schema)
+ new_schema['schema'] = new_inner_schema
+ new_schema['definitions'] = new_definitions
+ return new_schema
+
+ def handle_list_schema(self, schema: core_schema.ListSchema, f: Walk) -> core_schema.CoreSchema:
+ items_schema = schema.get('items_schema')
+ if items_schema is not None:
+ schema['items_schema'] = self.walk(items_schema, f)
+ return schema
+
+ def handle_set_schema(self, schema: core_schema.SetSchema, f: Walk) -> core_schema.CoreSchema:
+ items_schema = schema.get('items_schema')
+ if items_schema is not None:
+ schema['items_schema'] = self.walk(items_schema, f)
+ return schema
+
+ def handle_frozenset_schema(self, schema: core_schema.FrozenSetSchema, f: Walk) -> core_schema.CoreSchema:
+ items_schema = schema.get('items_schema')
+ if items_schema is not None:
+ schema['items_schema'] = self.walk(items_schema, f)
+ return schema
+
+ def handle_generator_schema(self, schema: core_schema.GeneratorSchema, f: Walk) -> core_schema.CoreSchema:
+ items_schema = schema.get('items_schema')
+ if items_schema is not None:
+ schema['items_schema'] = self.walk(items_schema, f)
+ return schema
+
+ def handle_tuple_schema(self, schema: core_schema.TupleSchema, f: Walk) -> core_schema.CoreSchema:
+ schema['items_schema'] = [self.walk(v, f) for v in schema['items_schema']]
+ return schema
+
+ def handle_dict_schema(self, schema: core_schema.DictSchema, f: Walk) -> core_schema.CoreSchema:
+ keys_schema = schema.get('keys_schema')
+ if keys_schema is not None:
+ schema['keys_schema'] = self.walk(keys_schema, f)
+ values_schema = schema.get('values_schema')
+ if values_schema:
+ schema['values_schema'] = self.walk(values_schema, f)
+ return schema
+
+ def handle_function_after_schema(
+ self, schema: core_schema.AfterValidatorFunctionSchema, f: Walk
+ ) -> core_schema.CoreSchema:
+ schema['schema'] = self.walk(schema['schema'], f)
+ return schema
+
+ def handle_function_before_schema(
+ self, schema: core_schema.BeforeValidatorFunctionSchema, f: Walk
+ ) -> core_schema.CoreSchema:
+ schema['schema'] = self.walk(schema['schema'], f)
+ if 'json_schema_input_schema' in schema:
+ schema['json_schema_input_schema'] = self.walk(schema['json_schema_input_schema'], f)
+ return schema
+
+ # TODO duplicate schema types for serializers and validators, needs to be deduplicated:
+ def handle_function_plain_schema(
+ self, schema: core_schema.PlainValidatorFunctionSchema | core_schema.PlainSerializerFunctionSerSchema, f: Walk
+ ) -> core_schema.CoreSchema:
+ if 'json_schema_input_schema' in schema:
+ schema['json_schema_input_schema'] = self.walk(schema['json_schema_input_schema'], f)
+ return schema # pyright: ignore[reportReturnType]
+
+ # TODO duplicate schema types for serializers and validators, needs to be deduplicated:
+ def handle_function_wrap_schema(
+ self, schema: core_schema.WrapValidatorFunctionSchema | core_schema.WrapSerializerFunctionSerSchema, f: Walk
+ ) -> core_schema.CoreSchema:
+ if 'schema' in schema:
+ schema['schema'] = self.walk(schema['schema'], f)
+ if 'json_schema_input_schema' in schema:
+ schema['json_schema_input_schema'] = self.walk(schema['json_schema_input_schema'], f)
+ return schema # pyright: ignore[reportReturnType]
+
+ def handle_union_schema(self, schema: core_schema.UnionSchema, f: Walk) -> core_schema.CoreSchema:
+ new_choices: list[CoreSchema | tuple[CoreSchema, str]] = []
+ for v in schema['choices']:
+ if isinstance(v, tuple):
+ new_choices.append((self.walk(v[0], f), v[1]))
+ else:
+ new_choices.append(self.walk(v, f))
+ schema['choices'] = new_choices
+ return schema
+
+ def handle_tagged_union_schema(self, schema: core_schema.TaggedUnionSchema, f: Walk) -> core_schema.CoreSchema:
+ new_choices: dict[Hashable, core_schema.CoreSchema] = {}
+ for k, v in schema['choices'].items():
+ new_choices[k] = v if isinstance(v, (str, int)) else self.walk(v, f)
+ schema['choices'] = new_choices
+ return schema
+
+ def handle_chain_schema(self, schema: core_schema.ChainSchema, f: Walk) -> core_schema.CoreSchema:
+ schema['steps'] = [self.walk(v, f) for v in schema['steps']]
+ return schema
+
+ def handle_lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema, f: Walk) -> core_schema.CoreSchema:
+ schema['lax_schema'] = self.walk(schema['lax_schema'], f)
+ schema['strict_schema'] = self.walk(schema['strict_schema'], f)
+ return schema
+
+ def handle_json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema, f: Walk) -> core_schema.CoreSchema:
+ schema['json_schema'] = self.walk(schema['json_schema'], f)
+ schema['python_schema'] = self.walk(schema['python_schema'], f)
+ return schema
+
+ def handle_model_fields_schema(self, schema: core_schema.ModelFieldsSchema, f: Walk) -> core_schema.CoreSchema:
+ extras_schema = schema.get('extras_schema')
+ if extras_schema is not None:
+ schema['extras_schema'] = self.walk(extras_schema, f)
+ replaced_fields: dict[str, core_schema.ModelField] = {}
+ replaced_computed_fields: list[core_schema.ComputedField] = []
+ for computed_field in schema.get('computed_fields', ()):
+ replaced_field = self._copy_schema(computed_field)
+ replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
+ replaced_computed_fields.append(replaced_field)
+ if replaced_computed_fields:
+ schema['computed_fields'] = replaced_computed_fields
+ for k, v in schema['fields'].items():
+ replaced_field = self._copy_schema(v)
+ replaced_field['schema'] = self.walk(v['schema'], f)
+ replaced_fields[k] = replaced_field
+ schema['fields'] = replaced_fields
+ return schema
+
+ def handle_typed_dict_schema(self, schema: core_schema.TypedDictSchema, f: Walk) -> core_schema.CoreSchema:
+ extras_schema = schema.get('extras_schema')
+ if extras_schema is not None:
+ schema['extras_schema'] = self.walk(extras_schema, f)
+ replaced_computed_fields: list[core_schema.ComputedField] = []
+ for computed_field in schema.get('computed_fields', ()):
+ replaced_field = self._copy_schema(computed_field)
+ replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
+ replaced_computed_fields.append(replaced_field)
+ if replaced_computed_fields:
+ schema['computed_fields'] = replaced_computed_fields
+ replaced_fields: dict[str, core_schema.TypedDictField] = {}
+ for k, v in schema['fields'].items():
+ replaced_field = self._copy_schema(v)
+ replaced_field['schema'] = self.walk(v['schema'], f)
+ replaced_fields[k] = replaced_field
+ schema['fields'] = replaced_fields
+ return schema
+
+ def handle_dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema, f: Walk) -> core_schema.CoreSchema:
+ replaced_fields: list[core_schema.DataclassField] = []
+ replaced_computed_fields: list[core_schema.ComputedField] = []
+ for computed_field in schema.get('computed_fields', ()):
+ replaced_field = self._copy_schema(computed_field)
+ replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
+ replaced_computed_fields.append(replaced_field)
+ if replaced_computed_fields:
+ schema['computed_fields'] = replaced_computed_fields
+ for field in schema['fields']:
+ replaced_field = self._copy_schema(field)
+ replaced_field['schema'] = self.walk(field['schema'], f)
+ replaced_fields.append(replaced_field)
+ schema['fields'] = replaced_fields
+ return schema
+
+ def handle_arguments_schema(self, schema: core_schema.ArgumentsSchema, f: Walk) -> core_schema.CoreSchema:
+ replaced_arguments_schema: list[core_schema.ArgumentsParameter] = []
+ for param in schema['arguments_schema']:
+ replaced_param = self._copy_schema(param)
+ replaced_param['schema'] = self.walk(param['schema'], f)
+ replaced_arguments_schema.append(replaced_param)
+ schema['arguments_schema'] = replaced_arguments_schema
+ if 'var_args_schema' in schema:
+ schema['var_args_schema'] = self.walk(schema['var_args_schema'], f)
+ if 'var_kwargs_schema' in schema:
+ schema['var_kwargs_schema'] = self.walk(schema['var_kwargs_schema'], f)
+ return schema
+
+ def handle_call_schema(self, schema: core_schema.CallSchema, f: Walk) -> core_schema.CoreSchema:
+ schema['arguments_schema'] = self.walk(schema['arguments_schema'], f)
+ if 'return_schema' in schema:
+ schema['return_schema'] = self.walk(schema['return_schema'], f)
+ return schema
+
+
+_dispatch = _WalkCoreSchema().walk
+_dispatch_no_copy = _WalkCoreSchema(copy=False).walk
+
+
+def walk_core_schema(schema: core_schema.CoreSchema, f: Walk, *, copy: bool = True) -> core_schema.CoreSchema:
+ """Recursively traverse a CoreSchema.
+
+ Args:
+        schema (core_schema.CoreSchema): The CoreSchema to process; it is not modified when `copy` is `True`.
+ f (Walk): A function to apply. This function takes two arguments:
+ 1. The current CoreSchema that is being processed
+ (not the same one you passed into this function, one level down).
+ 2. The "next" `f` to call. This lets you for example use `f=functools.partial(some_method, some_context)`
+ to pass data down the recursive calls without using globals or other mutable state.
+ copy: Whether schema should be recursively copied.
+
+ Returns:
+ core_schema.CoreSchema: A processed CoreSchema.
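+
+    Example (illustrative; mirrors the `collect_invalid_schemas` pattern above):
+        ```python
+        def count_int_schemas(schema: core_schema.CoreSchema) -> int:
+            count = 0
+
+            def _count(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+                nonlocal count
+                if s['type'] == 'int':
+                    count += 1
+                return recurse(s, _count)
+
+            walk_core_schema(schema, _count, copy=False)
+            return count
+        ```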
+ """
+ return f(schema.copy() if copy else schema, _dispatch if copy else _dispatch_no_copy)
+
+
+def simplify_schema_references(schema: core_schema.CoreSchema) -> core_schema.CoreSchema: # noqa: C901
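+    """Simplify schema references by collecting every schema that carries a `ref` into a
+    top-level `definitions` schema, then inlining each definition that is referenced exactly
+    once, is not involved in recursion, and carries no extra keys (such as `serialization`);
+    see the three passes below: `collect_refs`, `count_refs` and `inline_refs`.
+    """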
+ definitions: dict[str, core_schema.CoreSchema] = {}
+ ref_counts: dict[str, int] = defaultdict(int)
+ involved_in_recursion: dict[str, bool] = {}
+ current_recursion_ref_count: dict[str, int] = defaultdict(int)
+
+ def collect_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+ if s['type'] == 'definitions':
+ for definition in s['definitions']:
+ ref = get_ref(definition)
+ assert ref is not None
+ if ref not in definitions:
+ definitions[ref] = definition
+ recurse(definition, collect_refs)
+ return recurse(s['schema'], collect_refs)
+ else:
+ ref = get_ref(s)
+ if ref is not None:
+ new = recurse(s, collect_refs)
+ new_ref = get_ref(new)
+ if new_ref:
+ definitions[new_ref] = new
+ return core_schema.definition_reference_schema(schema_ref=ref)
+ else:
+ return recurse(s, collect_refs)
+
+ schema = walk_core_schema(schema, collect_refs)
+
+ def count_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+ if s['type'] != 'definition-ref':
+ return recurse(s, count_refs)
+ ref = s['schema_ref']
+ ref_counts[ref] += 1
+
+ if ref_counts[ref] >= 2:
+ # If this model is involved in a recursion this should be detected
+ # on its second encounter, we can safely stop the walk here.
+ if current_recursion_ref_count[ref] != 0:
+ involved_in_recursion[ref] = True
+ return s
+
+ current_recursion_ref_count[ref] += 1
+ if 'serialization' in s:
+ # Even though this is a `'definition-ref'` schema, there might
+ # be more references inside the serialization schema:
+ recurse(s, count_refs)
+
+ next_s = definitions[ref]
+ visited: set[str] = set()
+ while next_s['type'] == 'definition-ref':
+ if next_s['schema_ref'] in visited:
+ raise PydanticUserError(
+ f'{ref} contains a circular reference to itself.', code='circular-reference-schema'
+ )
+
+ visited.add(next_s['schema_ref'])
+ ref_counts[next_s['schema_ref']] += 1
+ next_s = definitions[next_s['schema_ref']]
+
+ recurse(next_s, count_refs)
+ current_recursion_ref_count[ref] -= 1
+ return s
+
+ schema = walk_core_schema(schema, count_refs, copy=False)
+
+ assert all(c == 0 for c in current_recursion_ref_count.values()), 'this is a bug! please report it'
+
+ def can_be_inlined(s: core_schema.DefinitionReferenceSchema, ref: str) -> bool:
+ if ref_counts[ref] > 1:
+ return False
+ if involved_in_recursion.get(ref, False):
+ return False
+ if 'serialization' in s:
+ return False
+ if 'metadata' in s:
+ metadata = s['metadata']
+ for k in [
+ *CoreMetadata.__annotations__.keys(),
+ 'pydantic.internal.union_discriminator',
+ 'pydantic.internal.tagged_union_tag',
+ ]:
+ if k in metadata:
+ # we need to keep this as a ref
+ return False
+ return True
+
+ def inline_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+ # Assume there are no infinite loops, because we already checked for that in `count_refs`
+ while s['type'] == 'definition-ref':
+ ref = s['schema_ref']
+
+ # Check if the reference is only used once, not involved in recursion and does not have
+ # any extra keys (like 'serialization')
+ if can_be_inlined(s, ref):
+ # Inline the reference by replacing the reference with the actual schema
+ new = definitions.pop(ref)
+ ref_counts[ref] -= 1 # because we just replaced it!
+ # put all other keys that were on the def-ref schema into the inlined version
+ # in particular this is needed for `serialization`
+ if 'serialization' in s:
+ new['serialization'] = s['serialization']
+ s = new
+ else:
+ break
+ return recurse(s, inline_refs)
+
+ schema = walk_core_schema(schema, inline_refs, copy=False)
+
+ def_values = [v for v in definitions.values() if ref_counts[v['ref']] > 0] # type: ignore
+
+ if def_values:
+ schema = core_schema.definitions_schema(schema=schema, definitions=def_values)
+ return schema
+
+
+def _strip_metadata(schema: CoreSchema) -> CoreSchema:
+ def strip_metadata(s: CoreSchema, recurse: Recurse) -> CoreSchema:
+ s = s.copy()
+ s.pop('metadata', None)
+ if s['type'] == 'model-fields':
+ s = s.copy()
+ s['fields'] = {k: v.copy() for k, v in s['fields'].items()}
+ for field_name, field_schema in s['fields'].items():
+ field_schema.pop('metadata', None)
+ s['fields'][field_name] = field_schema
+ computed_fields = s.get('computed_fields', None)
+ if computed_fields:
+ s['computed_fields'] = [cf.copy() for cf in computed_fields]
+ for cf in computed_fields:
+ cf.pop('metadata', None)
+ else:
+ s.pop('computed_fields', None)
+ elif s['type'] == 'model':
+ # remove some defaults
+ if s.get('custom_init', True) is False:
+ s.pop('custom_init')
+ if s.get('root_model', True) is False:
+ s.pop('root_model')
+ if {'title'}.issuperset(s.get('config', {}).keys()):
+ s.pop('config', None)
+
+ return recurse(s, strip_metadata)
+
+ return walk_core_schema(schema, strip_metadata)
+
+
+def pretty_print_core_schema(
+ schema: CoreSchema,
+ include_metadata: bool = False,
+) -> None:
+ """Pretty print a CoreSchema using rich.
+ This is intended for debugging purposes.
+
+ Args:
+ schema: The CoreSchema to print.
+ include_metadata: Whether to include metadata in the output. Defaults to `False`.
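+
+    Example (illustrative; requires `rich` to be installed):
+        ```python
+        from pydantic import BaseModel
+
+        class Model(BaseModel):
+            x: int
+
+        pretty_print_core_schema(Model.__pydantic_core_schema__)
+        ```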
+ """
+ from rich import print # type: ignore # install it manually in your dev env
+
+ if not include_metadata:
+ schema = _strip_metadata(schema)
+
+ return print(schema)
+
+
+def validate_core_schema(schema: CoreSchema) -> CoreSchema:
+ if 'PYDANTIC_SKIP_VALIDATING_CORE_SCHEMAS' in os.environ:
+ return schema
+ return _validate_core_schema(schema)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_dataclasses.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_dataclasses.py
new file mode 100644
index 00000000..f2e6f22f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_dataclasses.py
@@ -0,0 +1,246 @@
+"""Private logic for creating pydantic dataclasses."""
+
+from __future__ import annotations as _annotations
+
+import dataclasses
+import typing
+import warnings
+from functools import partial, wraps
+from typing import Any, ClassVar
+
+from pydantic_core import (
+ ArgsKwargs,
+ SchemaSerializer,
+ SchemaValidator,
+ core_schema,
+)
+from typing_extensions import TypeGuard
+
+from ..errors import PydanticUndefinedAnnotation
+from ..plugin._schema_validator import PluggableSchemaValidator, create_schema_validator
+from ..warnings import PydanticDeprecatedSince20
+from . import _config, _decorators
+from ._fields import collect_dataclass_fields
+from ._generate_schema import GenerateSchema
+from ._generics import get_standard_typevars_map
+from ._mock_val_ser import set_dataclass_mocks
+from ._namespace_utils import NsResolver
+from ._schema_generation_shared import CallbackGetCoreSchemaHandler
+from ._signature import generate_pydantic_signature
+from ._utils import LazyClassAttribute
+
+if typing.TYPE_CHECKING:
+ from _typeshed import DataclassInstance as StandardDataclass
+
+ from ..config import ConfigDict
+ from ..fields import FieldInfo
+
+ class PydanticDataclass(StandardDataclass, typing.Protocol):
+ """A protocol containing attributes only available once a class has been decorated as a Pydantic dataclass.
+
+ Attributes:
+ __pydantic_config__: Pydantic-specific configuration settings for the dataclass.
+ __pydantic_complete__: Whether dataclass building is completed, or if there are still undefined fields.
+ __pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+ __pydantic_decorators__: Metadata containing the decorators defined on the dataclass.
+ __pydantic_fields__: Metadata about the fields defined on the dataclass.
+ __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the dataclass.
+ __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the dataclass.
+ """
+
+ __pydantic_config__: ClassVar[ConfigDict]
+ __pydantic_complete__: ClassVar[bool]
+ __pydantic_core_schema__: ClassVar[core_schema.CoreSchema]
+ __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos]
+ __pydantic_fields__: ClassVar[dict[str, FieldInfo]]
+ __pydantic_serializer__: ClassVar[SchemaSerializer]
+ __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator]
+
+else:
+ # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+ # and https://youtrack.jetbrains.com/issue/PY-51428
+ DeprecationWarning = PydanticDeprecatedSince20
+
+
+def set_dataclass_fields(
+ cls: type[StandardDataclass],
+ ns_resolver: NsResolver | None = None,
+ config_wrapper: _config.ConfigWrapper | None = None,
+) -> None:
+ """Collect and set `cls.__pydantic_fields__`.
+
+ Args:
+ cls: The class.
+ ns_resolver: Namespace resolver to use when getting dataclass annotations.
+ config_wrapper: The config wrapper instance, defaults to `None`.
+ """
+ typevars_map = get_standard_typevars_map(cls)
+ fields = collect_dataclass_fields(
+ cls, ns_resolver=ns_resolver, typevars_map=typevars_map, config_wrapper=config_wrapper
+ )
+
+ cls.__pydantic_fields__ = fields # type: ignore
+
+
+def complete_dataclass(
+ cls: type[Any],
+ config_wrapper: _config.ConfigWrapper,
+ *,
+ raise_errors: bool = True,
+ ns_resolver: NsResolver | None = None,
+ _force_build: bool = False,
+) -> bool:
+ """Finish building a pydantic dataclass.
+
+ This logic is called on a class which has already been wrapped in `dataclasses.dataclass()`.
+
+ This is somewhat analogous to `pydantic._internal._model_construction.complete_model_class`.
+
+ Args:
+ cls: The class.
+ config_wrapper: The config wrapper instance.
+ raise_errors: Whether to raise errors, defaults to `True`.
+ ns_resolver: The namespace resolver instance to use when collecting dataclass fields
+ and during schema building.
+ _force_build: Whether to force building the dataclass, no matter if
+ [`defer_build`][pydantic.config.ConfigDict.defer_build] is set.
+
+ Returns:
+ `True` if building a pydantic dataclass is successfully completed, `False` otherwise.
+
+ Raises:
+        PydanticUndefinedAnnotation: If `raise_errors` is `True` and there is an undefined annotation.
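+
+    Example (illustrative sketch; this is normally invoked for you by the
+    `pydantic.dataclasses.dataclass` decorator rather than called directly):
+        ```python
+        import dataclasses
+
+        @dataclasses.dataclass
+        class Point:
+            x: int
+
+        complete_dataclass(Point, _config.ConfigWrapper(None))
+        Point(x='1')  # the synthesized __init__ now validates, coercing '1' to 1
+        ```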
+ """
+ original_init = cls.__init__
+
+ # dataclass.__init__ must be defined here so its `__qualname__` can be changed since functions can't be copied,
+ # and so that the mock validator is used if building was deferred:
+ def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
+ __tracebackhide__ = True
+ s = __dataclass_self__
+ s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
+
+ __init__.__qualname__ = f'{cls.__qualname__}.__init__'
+
+ cls.__init__ = __init__ # type: ignore
+ cls.__pydantic_config__ = config_wrapper.config_dict # type: ignore
+
+ set_dataclass_fields(cls, ns_resolver, config_wrapper=config_wrapper)
+
+ if not _force_build and config_wrapper.defer_build:
+ set_dataclass_mocks(cls, cls.__name__)
+ return False
+
+ if hasattr(cls, '__post_init_post_parse__'):
+ warnings.warn(
+ 'Support for `__post_init_post_parse__` has been dropped, the method will not be called', DeprecationWarning
+ )
+
+ typevars_map = get_standard_typevars_map(cls)
+ gen_schema = GenerateSchema(
+ config_wrapper,
+ ns_resolver=ns_resolver,
+ typevars_map=typevars_map,
+ )
+
+ # set __signature__ attr only for the class, but not for its instances
+ # (because instances can define `__call__`, and `inspect.signature` shouldn't
+ # use the `__signature__` attribute and instead generate from `__call__`).
+ cls.__signature__ = LazyClassAttribute(
+ '__signature__',
+ partial(
+ generate_pydantic_signature,
+ # It's important that we reference the `original_init` here,
+ # as it is the one synthesized by the stdlib `dataclass` module:
+ init=original_init,
+ fields=cls.__pydantic_fields__, # type: ignore
+ populate_by_name=config_wrapper.populate_by_name,
+ extra=config_wrapper.extra,
+ is_dataclass=True,
+ ),
+ )
+ get_core_schema = getattr(cls, '__get_pydantic_core_schema__', None)
+ try:
+ if get_core_schema:
+ schema = get_core_schema(
+ cls,
+ CallbackGetCoreSchemaHandler(
+ partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
+ gen_schema,
+ ref_mode='unpack',
+ ),
+ )
+ else:
+ schema = gen_schema.generate_schema(cls, from_dunder_get_core_schema=False)
+ except PydanticUndefinedAnnotation as e:
+ if raise_errors:
+ raise
+ set_dataclass_mocks(cls, cls.__name__, f'`{e.name}`')
+ return False
+
+ core_config = config_wrapper.core_config(title=cls.__name__)
+
+ try:
+ schema = gen_schema.clean_schema(schema)
+ except gen_schema.CollectedInvalid:
+ set_dataclass_mocks(cls, cls.__name__, 'all referenced types')
+ return False
+
+ # We are about to set all the remaining required properties expected for this cast;
+ # __pydantic_decorators__ and __pydantic_fields__ should already be set
+ cls = typing.cast('type[PydanticDataclass]', cls)
+ # debug(schema)
+
+ cls.__pydantic_core_schema__ = schema
+ cls.__pydantic_validator__ = validator = create_schema_validator(
+ schema, cls, cls.__module__, cls.__qualname__, 'dataclass', core_config, config_wrapper.plugin_settings
+ )
+ cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)
+
+ if config_wrapper.validate_assignment:
+
+ @wraps(cls.__setattr__)
+        def validated_setattr(instance: Any, field: str, value: Any, /) -> None:
+ validator.validate_assignment(instance, field, value)
+
+ cls.__setattr__ = validated_setattr.__get__(None, cls) # type: ignore
+
+ cls.__pydantic_complete__ = True
+ return True
+
+
+def is_builtin_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]:
+ """Returns True if a class is a stdlib dataclass and *not* a pydantic dataclass.
+
+ We check that
+ - `_cls` is a dataclass
+ - `_cls` does not inherit from a processed pydantic dataclass (and thus have a `__pydantic_validator__`)
+ - `_cls` does not have any annotations that are not dataclass fields
+ e.g.
+ ```python
+ import dataclasses
+
+ import pydantic.dataclasses
+
+ @dataclasses.dataclass
+ class A:
+ x: int
+
+ @pydantic.dataclasses.dataclass
+ class B(A):
+ y: int
+ ```
+    In this case, when we first check `B`, we make an extra check and look at the annotations ('y'),
+    which aren't all contained in the dataclass fields inherited from the stdlib dataclass (i.e. just 'x').
+
+ Args:
+        _cls: The class.
+
+ Returns:
+ `True` if the class is a stdlib dataclass, `False` otherwise.
+ """
+ return (
+ dataclasses.is_dataclass(_cls)
+ and not hasattr(_cls, '__pydantic_validator__')
+ and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {})))
+ )
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_decorators.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_decorators.py
new file mode 100644
index 00000000..bb0f7207
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_decorators.py
@@ -0,0 +1,823 @@
+"""Logic related to validators applied to models etc. via the `@field_validator` and `@model_validator` decorators."""
+
+from __future__ import annotations as _annotations
+
+from collections import deque
+from dataclasses import dataclass, field
+from functools import cached_property, partial, partialmethod
+from inspect import Parameter, Signature, isdatadescriptor, ismethoddescriptor, signature
+from itertools import islice
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Iterable, TypeVar, Union
+
+from pydantic_core import PydanticUndefined, core_schema
+from typing_extensions import Literal, TypeAlias, is_typeddict
+
+from ..errors import PydanticUserError
+from ._core_utils import get_type_ref
+from ._internal_dataclass import slots_true
+from ._namespace_utils import GlobalsNamespace, MappingNamespace
+from ._typing_extra import get_function_type_hints
+from ._utils import can_be_positional
+
+if TYPE_CHECKING:
+ from ..fields import ComputedFieldInfo
+ from ..functional_validators import FieldValidatorModes
+
+
+@dataclass(**slots_true)
+class ValidatorDecoratorInfo:
+ """A container for data from `@validator` so that we can access it
+ while building the pydantic-core schema.
+
+ Attributes:
+ decorator_repr: A class variable representing the decorator string, '@validator'.
+ fields: A tuple of field names the validator should be called on.
+ mode: The proposed validator mode.
+ each_item: For complex objects (sets, lists etc.) whether to validate individual
+ elements rather than the whole object.
+ always: Whether this method and other validators should be called even if the value is missing.
+ check_fields: Whether to check that the fields actually exist on the model.
+ """
+
+ decorator_repr: ClassVar[str] = '@validator'
+
+ fields: tuple[str, ...]
+ mode: Literal['before', 'after']
+ each_item: bool
+ always: bool
+ check_fields: bool | None
+
+
+@dataclass(**slots_true)
+class FieldValidatorDecoratorInfo:
+ """A container for data from `@field_validator` so that we can access it
+ while building the pydantic-core schema.
+
+ Attributes:
+ decorator_repr: A class variable representing the decorator string, '@field_validator'.
+ fields: A tuple of field names the validator should be called on.
+ mode: The proposed validator mode.
+ check_fields: Whether to check that the fields actually exist on the model.
+ json_schema_input_type: The input type of the function. This is only used to generate
+            the appropriate JSON Schema (in validation mode) and can only be specified
+ when `mode` is either `'before'`, `'plain'` or `'wrap'`.
+ """
+
+ decorator_repr: ClassVar[str] = '@field_validator'
+
+ fields: tuple[str, ...]
+ mode: FieldValidatorModes
+ check_fields: bool | None
+ json_schema_input_type: Any
+
+
+@dataclass(**slots_true)
+class RootValidatorDecoratorInfo:
+ """A container for data from `@root_validator` so that we can access it
+ while building the pydantic-core schema.
+
+ Attributes:
+ decorator_repr: A class variable representing the decorator string, '@root_validator'.
+ mode: The proposed validator mode.
+ """
+
+ decorator_repr: ClassVar[str] = '@root_validator'
+ mode: Literal['before', 'after']
+
+
+@dataclass(**slots_true)
+class FieldSerializerDecoratorInfo:
+ """A container for data from `@field_serializer` so that we can access it
+ while building the pydantic-core schema.
+
+ Attributes:
+ decorator_repr: A class variable representing the decorator string, '@field_serializer'.
+ fields: A tuple of field names the serializer should be called on.
+ mode: The proposed serializer mode.
+ return_type: The type of the serializer's return value.
+ when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
+ and `'json-unless-none'`.
+ check_fields: Whether to check that the fields actually exist on the model.
+ """
+
+ decorator_repr: ClassVar[str] = '@field_serializer'
+ fields: tuple[str, ...]
+ mode: Literal['plain', 'wrap']
+ return_type: Any
+ when_used: core_schema.WhenUsed
+ check_fields: bool | None
+
+
+@dataclass(**slots_true)
+class ModelSerializerDecoratorInfo:
+ """A container for data from `@model_serializer` so that we can access it
+ while building the pydantic-core schema.
+
+ Attributes:
+ decorator_repr: A class variable representing the decorator string, '@model_serializer'.
+ mode: The proposed serializer mode.
+ return_type: The type of the serializer's return value.
+ when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
+ and `'json-unless-none'`.
+ """
+
+ decorator_repr: ClassVar[str] = '@model_serializer'
+ mode: Literal['plain', 'wrap']
+ return_type: Any
+ when_used: core_schema.WhenUsed
+
+
+@dataclass(**slots_true)
+class ModelValidatorDecoratorInfo:
+ """A container for data from `@model_validator` so that we can access it
+ while building the pydantic-core schema.
+
+ Attributes:
+ decorator_repr: A class variable representing the decorator string, '@model_validator'.
+        mode: The proposed validator mode.
+ """
+
+ decorator_repr: ClassVar[str] = '@model_validator'
+ mode: Literal['wrap', 'before', 'after']
+
+
+DecoratorInfo: TypeAlias = """Union[
+ ValidatorDecoratorInfo,
+ FieldValidatorDecoratorInfo,
+ RootValidatorDecoratorInfo,
+ FieldSerializerDecoratorInfo,
+ ModelSerializerDecoratorInfo,
+ ModelValidatorDecoratorInfo,
+ ComputedFieldInfo,
+]"""
+
+ReturnType = TypeVar('ReturnType')
+DecoratedType: TypeAlias = (
+ 'Union[classmethod[Any, Any, ReturnType], staticmethod[Any, ReturnType], Callable[..., ReturnType], property]'
+)
+
+
+@dataclass # can't use slots here since we set attributes on `__post_init__`
+class PydanticDescriptorProxy(Generic[ReturnType]):
+ """Wrap a classmethod, staticmethod, property or unbound function
+ and act as a descriptor that allows us to detect decorated items
+ from the class' attributes.
+
+ This class' __get__ returns the wrapped item's __get__ result,
+ which makes it transparent for classmethods and staticmethods.
+
+ Attributes:
+ wrapped: The decorator that has to be wrapped.
+ decorator_info: The decorator info.
+        shim: A wrapper function used to wrap a V1 style function.
+ """
+
+ wrapped: DecoratedType[ReturnType]
+ decorator_info: DecoratorInfo
+ shim: Callable[[Callable[..., Any]], Callable[..., Any]] | None = None
+
+ def __post_init__(self):
+ for attr in 'setter', 'deleter':
+ if hasattr(self.wrapped, attr):
+ f = partial(self._call_wrapped_attr, name=attr)
+ setattr(self, attr, f)
+
+ def _call_wrapped_attr(self, func: Callable[[Any], None], *, name: str) -> PydanticDescriptorProxy[ReturnType]:
+ self.wrapped = getattr(self.wrapped, name)(func)
+ if isinstance(self.wrapped, property):
+ # update ComputedFieldInfo.wrapped_property
+ from ..fields import ComputedFieldInfo
+
+ if isinstance(self.decorator_info, ComputedFieldInfo):
+ self.decorator_info.wrapped_property = self.wrapped
+ return self
+
+ def __get__(self, obj: object | None, obj_type: type[object] | None = None) -> PydanticDescriptorProxy[ReturnType]:
+ try:
+ return self.wrapped.__get__(obj, obj_type)
+ except AttributeError:
+ # not a descriptor, e.g. a partial object
+ return self.wrapped # type: ignore[return-value]
+
+ def __set_name__(self, instance: Any, name: str) -> None:
+ if hasattr(self.wrapped, '__set_name__'):
+ self.wrapped.__set_name__(instance, name) # pyright: ignore[reportFunctionMemberAccess]
+
+ def __getattr__(self, __name: str) -> Any:
+ """Forward checks for __isabstractmethod__ and such."""
+ return getattr(self.wrapped, __name)
+
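+# Illustrative sketch (not part of the original module): the proxy is
+# transparent to ordinary attribute access, so a wrapped classmethod still
+# binds as usual. The names below are hypothetical:
+#
+#     proxy = PydanticDescriptorProxy(
+#         wrapped=classmethod(lambda cls, v: v),
+#         decorator_info=ValidatorDecoratorInfo(
+#             fields=('x',), mode='before', each_item=False,
+#             always=False, check_fields=None,
+#         ),
+#     )
+#
+#     class M:
+#         check_x = proxy
+#
+#     M.check_x  # resolves via `proxy.__get__`, yielding the bound classmethod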
+
+DecoratorInfoType = TypeVar('DecoratorInfoType', bound=DecoratorInfo)
+
+
+@dataclass(**slots_true)
+class Decorator(Generic[DecoratorInfoType]):
+ """A generic container class to join together the decorator metadata
+    (metadata from the decorator itself, which we have when the
+ decorator is called but not when we are building the core-schema)
+ and the bound function (which we have after the class itself is created).
+
+ Attributes:
+ cls_ref: The class ref.
+ cls_var_name: The decorated function name.
+ func: The decorated function.
+        shim: A wrapper function used to wrap a V1 style function.
+ info: The decorator info.
+ """
+
+ cls_ref: str
+ cls_var_name: str
+ func: Callable[..., Any]
+ shim: Callable[[Any], Any] | None
+ info: DecoratorInfoType
+
+ @staticmethod
+ def build(
+ cls_: Any,
+ *,
+ cls_var_name: str,
+ shim: Callable[[Any], Any] | None,
+ info: DecoratorInfoType,
+ ) -> Decorator[DecoratorInfoType]:
+ """Build a new decorator.
+
+ Args:
+ cls_: The class.
+ cls_var_name: The decorated function name.
+            shim: A wrapper function used to wrap a V1 style function.
+ info: The decorator info.
+
+ Returns:
+ The new decorator instance.
+ """
+ func = get_attribute_from_bases(cls_, cls_var_name)
+ if shim is not None:
+ func = shim(func)
+ func = unwrap_wrapped_function(func, unwrap_partial=False)
+ if not callable(func):
+ # This branch will get hit for classmethod properties
+ attribute = get_attribute_from_base_dicts(cls_, cls_var_name) # prevents the binding call to `__get__`
+ if isinstance(attribute, PydanticDescriptorProxy):
+ func = unwrap_wrapped_function(attribute.wrapped)
+ return Decorator(
+ cls_ref=get_type_ref(cls_),
+ cls_var_name=cls_var_name,
+ func=func,
+ shim=shim,
+ info=info,
+ )
+
+ def bind_to_cls(self, cls: Any) -> Decorator[DecoratorInfoType]:
+ """Bind the decorator to a class.
+
+ Args:
+ cls: the class.
+
+ Returns:
+ The new decorator instance.
+ """
+ return self.build(
+ cls,
+ cls_var_name=self.cls_var_name,
+ shim=self.shim,
+ info=self.info,
+ )
+
+
+def get_bases(tp: type[Any]) -> tuple[type[Any], ...]:
+ """Get the base classes of a class or typeddict.
+
+ Args:
+        tp: The type or class to get the bases of.
+
+ Returns:
+ The base classes.
+ """
+ if is_typeddict(tp):
+ return tp.__orig_bases__ # type: ignore
+ try:
+ return tp.__bases__
+ except AttributeError:
+ return ()
+
+
+def mro(tp: type[Any]) -> tuple[type[Any], ...]:
+ """Calculate the Method Resolution Order of bases using the C3 algorithm.
+
+ See https://www.python.org/download/releases/2.3/mro/
+ """
+ # try to use the existing mro, for performance mainly
+ # but also because it helps verify the implementation below
+ if not is_typeddict(tp):
+ try:
+ return tp.__mro__
+ except AttributeError:
+ # GenericAlias and some other cases
+ pass
+
+ bases = get_bases(tp)
+ return (tp,) + mro_for_bases(bases)
+
+
+def mro_for_bases(bases: tuple[type[Any], ...]) -> tuple[type[Any], ...]:
+ def merge_seqs(seqs: list[deque[type[Any]]]) -> Iterable[type[Any]]:
+ while True:
+ non_empty = [seq for seq in seqs if seq]
+ if not non_empty:
+ # Nothing left to process, we're done.
+ return
+ candidate: type[Any] | None = None
+ for seq in non_empty: # Find merge candidates among seq heads.
+ candidate = seq[0]
+ not_head = [s for s in non_empty if candidate in islice(s, 1, None)]
+ if not_head:
+ # Reject the candidate.
+ candidate = None
+ else:
+ break
+ if not candidate:
+ raise TypeError('Inconsistent hierarchy, no C3 MRO is possible')
+ yield candidate
+ for seq in non_empty:
+ # Remove candidate.
+ if seq[0] == candidate:
+ seq.popleft()
+
+ seqs = [deque(mro(base)) for base in bases] + [deque(bases)]
+ return tuple(merge_seqs(seqs))
+
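+# Illustrative sketch (not part of the original module): C3 linearization of a
+# classic diamond; the virtual MRO matches `type.__mro__` for real classes:
+#
+#     class A: ...
+#     class B(A): ...
+#     class C(A): ...
+#     class D(B, C): ...
+#
+#     mro(D)                 # -> (D, B, C, A, object)
+#     mro_for_bases((B, C))  # -> (B, C, A, object)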
+
+_sentinel = object()
+
+
+def get_attribute_from_bases(tp: type[Any] | tuple[type[Any], ...], name: str) -> Any:
+ """Get the attribute from the next class in the MRO that has it,
+ aiming to simulate calling the method on the actual class.
+
+ The reason for iterating over the mro instead of just getting
+ the attribute (which would do that for us) is to support TypedDict,
+ which lacks a real __mro__, but can have a virtual one constructed
+ from its bases (as done here).
+
+ Args:
+ tp: The type or class to search for the attribute. If a tuple, this is treated as a set of base classes.
+ name: The name of the attribute to retrieve.
+
+ Returns:
+ Any: The attribute value, if found.
+
+ Raises:
+ AttributeError: If the attribute is not found in any class in the MRO.
+ """
+ if isinstance(tp, tuple):
+ for base in mro_for_bases(tp):
+ attribute = base.__dict__.get(name, _sentinel)
+ if attribute is not _sentinel:
+ attribute_get = getattr(attribute, '__get__', None)
+ if attribute_get is not None:
+ return attribute_get(None, tp)
+ return attribute
+ raise AttributeError(f'{name} not found in {tp}')
+ else:
+ try:
+ return getattr(tp, name)
+ except AttributeError:
+ return get_attribute_from_bases(mro(tp), name)
+
+
+def get_attribute_from_base_dicts(tp: type[Any], name: str) -> Any:
+ """Get an attribute out of the `__dict__` following the MRO.
+ This prevents the call to `__get__` on the descriptor, and allows
+ us to get the original function for classmethod properties.
+
+ Args:
+ tp: The type or class to search for the attribute.
+ name: The name of the attribute to retrieve.
+
+ Returns:
+ Any: The attribute value, if found.
+
+ Raises:
+ KeyError: If the attribute is not found in any class's `__dict__` in the MRO.
+ """
+ for base in reversed(mro(tp)):
+ if name in base.__dict__:
+ return base.__dict__[name]
+ return tp.__dict__[name] # raise the error
+
+
+@dataclass(**slots_true)
+class DecoratorInfos:
+ """Mapping of name in the class namespace to decorator info.
+
+    Note that the name in the class namespace is the function or attribute name,
+ not the field name!
+ """
+
+ validators: dict[str, Decorator[ValidatorDecoratorInfo]] = field(default_factory=dict)
+ field_validators: dict[str, Decorator[FieldValidatorDecoratorInfo]] = field(default_factory=dict)
+ root_validators: dict[str, Decorator[RootValidatorDecoratorInfo]] = field(default_factory=dict)
+ field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]] = field(default_factory=dict)
+ model_serializers: dict[str, Decorator[ModelSerializerDecoratorInfo]] = field(default_factory=dict)
+ model_validators: dict[str, Decorator[ModelValidatorDecoratorInfo]] = field(default_factory=dict)
+ computed_fields: dict[str, Decorator[ComputedFieldInfo]] = field(default_factory=dict)
+
+ @staticmethod
+ def build(model_dc: type[Any]) -> DecoratorInfos: # noqa: C901 (ignore complexity)
+        """We want to collect all DecFunc instances that exist as
+        attributes in the namespace of the class (a BaseModel or dataclass)
+        that called us.
+        But we want to collect these in the order of the bases,
+        so instead of getting them all from the leaf class (the class that called us),
+        we traverse the bases from root (the oldest ancestor class) to leaf
+        and collect all of the instances as we go, taking care to replace
+        any duplicate ones with the last one we see to mimic how function overriding
+        works with inheritance.
+        If we do replace any functions, we put the replacement into the position
+        the replaced function was in; that is, we maintain the order.
+        """
+ # reminder: dicts are ordered and replacement does not alter the order
+ res = DecoratorInfos()
+ for base in reversed(mro(model_dc)[1:]):
+ existing: DecoratorInfos | None = base.__dict__.get('__pydantic_decorators__')
+ if existing is None:
+ existing = DecoratorInfos.build(base)
+ res.validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.validators.items()})
+ res.field_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_validators.items()})
+ res.root_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.root_validators.items()})
+ res.field_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_serializers.items()})
+ res.model_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_serializers.items()})
+ res.model_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_validators.items()})
+ res.computed_fields.update({k: v.bind_to_cls(model_dc) for k, v in existing.computed_fields.items()})
+
+ to_replace: list[tuple[str, Any]] = []
+
+ for var_name, var_value in vars(model_dc).items():
+ if isinstance(var_value, PydanticDescriptorProxy):
+ info = var_value.decorator_info
+ if isinstance(info, ValidatorDecoratorInfo):
+ res.validators[var_name] = Decorator.build(
+ model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+ )
+ elif isinstance(info, FieldValidatorDecoratorInfo):
+ res.field_validators[var_name] = Decorator.build(
+ model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+ )
+ elif isinstance(info, RootValidatorDecoratorInfo):
+ res.root_validators[var_name] = Decorator.build(
+ model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+ )
+ elif isinstance(info, FieldSerializerDecoratorInfo):
+ # check whether a serializer function is already registered for fields
+ for field_serializer_decorator in res.field_serializers.values():
+ # check that each field has at most one serializer function.
+ # serializer functions for the same field in subclasses are allowed,
+ # and are treated as overrides
+ if field_serializer_decorator.cls_var_name == var_name:
+ continue
+ for f in info.fields:
+ if f in field_serializer_decorator.info.fields:
+ raise PydanticUserError(
+ 'Multiple field serializer functions were defined '
+ f'for field {f!r}, this is not allowed.',
+ code='multiple-field-serializers',
+ )
+ res.field_serializers[var_name] = Decorator.build(
+ model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+ )
+ elif isinstance(info, ModelValidatorDecoratorInfo):
+ res.model_validators[var_name] = Decorator.build(
+ model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+ )
+ elif isinstance(info, ModelSerializerDecoratorInfo):
+ res.model_serializers[var_name] = Decorator.build(
+ model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+ )
+ else:
+ from ..fields import ComputedFieldInfo
+
+                    # at this point `info` can only be a `ComputedFieldInfo`
+                    assert isinstance(info, ComputedFieldInfo)
+ res.computed_fields[var_name] = Decorator.build(
+ model_dc, cls_var_name=var_name, shim=None, info=info
+ )
+ to_replace.append((var_name, var_value.wrapped))
+ if to_replace:
+ # If we can save `__pydantic_decorators__` on the class we'll be able to check for it above
+ # so then we don't need to re-process the type, which means we can discard our descriptor wrappers
+ # and replace them with the thing they are wrapping (see the other setattr call below)
+ # which allows validator class methods to also function as regular class methods
+ model_dc.__pydantic_decorators__ = res
+ for name, value in to_replace:
+ setattr(model_dc, name, value)
+ return res
+
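+# Illustrative sketch (not part of the original module), assuming pydantic is
+# importable: decorators declared on a base class are collected and re-bound
+# onto each subclass.
+#
+#     from pydantic import BaseModel, field_validator
+#
+#     class Parent(BaseModel):
+#         x: int
+#
+#         @field_validator('x')
+#         @classmethod
+#         def check_x(cls, v: int) -> int:
+#             return v
+#
+#     class Child(Parent):
+#         pass
+#
+#     'check_x' in Child.__pydantic_decorators__.field_validators  # True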
+
+def inspect_validator(validator: Callable[..., Any], mode: FieldValidatorModes) -> bool:
+ """Look at a field or model validator function and determine whether it takes an info argument.
+
+ An error is raised if the function has an invalid signature.
+
+ Args:
+ validator: The validator function to inspect.
+ mode: The proposed validator mode.
+
+ Returns:
+ Whether the validator takes an info argument.
+ """
+ try:
+ sig = signature(validator)
+ except (ValueError, TypeError):
+ # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
+ # In this case, we assume no info argument is present:
+ return False
+ n_positional = count_positional_required_params(sig)
+ if mode == 'wrap':
+ if n_positional == 3:
+ return True
+ elif n_positional == 2:
+ return False
+ else:
+        assert mode in {'before', 'after', 'plain'}, f"invalid mode: {mode!r}, expected 'before', 'after' or 'plain'"
+ if n_positional == 2:
+ return True
+ elif n_positional == 1:
+ return False
+
+ raise PydanticUserError(
+ f'Unrecognized field_validator function signature for {validator} with `mode={mode}`:{sig}',
+ code='validator-signature',
+ )
+
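+# Illustrative sketch (not part of the original module): the result is driven
+# purely by the number of required positional parameters:
+#
+#     inspect_validator(lambda v: v, mode='after')                # False
+#     inspect_validator(lambda v, info: v, mode='after')          # True
+#     inspect_validator(lambda v, handler, info: v, mode='wrap')  # True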
+
+def inspect_field_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> tuple[bool, bool]:
+ """Look at a field serializer function and determine if it is a field serializer,
+ and whether it takes an info argument.
+
+ An error is raised if the function has an invalid signature.
+
+ Args:
+ serializer: The serializer function to inspect.
+ mode: The serializer mode, either 'plain' or 'wrap'.
+
+ Returns:
+ Tuple of (is_field_serializer, info_arg).
+ """
+ try:
+ sig = signature(serializer)
+ except (ValueError, TypeError):
+ # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
+ # In this case, we assume no info argument is present and this is not a method:
+ return (False, False)
+
+ first = next(iter(sig.parameters.values()), None)
+ is_field_serializer = first is not None and first.name == 'self'
+
+ n_positional = count_positional_required_params(sig)
+ if is_field_serializer:
+ # -1 to correct for self parameter
+ info_arg = _serializer_info_arg(mode, n_positional - 1)
+ else:
+ info_arg = _serializer_info_arg(mode, n_positional)
+
+ if info_arg is None:
+ raise PydanticUserError(
+ f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
+ code='field-serializer-signature',
+ )
+
+ return is_field_serializer, info_arg
+
+
+def inspect_annotated_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
+ """Look at a serializer function used via `Annotated` and determine whether it takes an info argument.
+
+ An error is raised if the function has an invalid signature.
+
+ Args:
+ serializer: The serializer function to check.
+ mode: The serializer mode, either 'plain' or 'wrap'.
+
+ Returns:
+ info_arg
+ """
+ try:
+ sig = signature(serializer)
+ except (ValueError, TypeError):
+ # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
+ # In this case, we assume no info argument is present:
+ return False
+ info_arg = _serializer_info_arg(mode, count_positional_required_params(sig))
+ if info_arg is None:
+ raise PydanticUserError(
+ f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
+ code='field-serializer-signature',
+ )
+ else:
+ return info_arg
+
+
+def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
+ """Look at a model serializer function and determine whether it takes an info argument.
+
+ An error is raised if the function has an invalid signature.
+
+ Args:
+ serializer: The serializer function to check.
+ mode: The serializer mode, either 'plain' or 'wrap'.
+
+ Returns:
+ `info_arg` - whether the function expects an info argument.
+ """
+ if isinstance(serializer, (staticmethod, classmethod)) or not is_instance_method_from_sig(serializer):
+ raise PydanticUserError(
+ '`@model_serializer` must be applied to instance methods', code='model-serializer-instance-method'
+ )
+
+ sig = signature(serializer)
+ info_arg = _serializer_info_arg(mode, count_positional_required_params(sig))
+ if info_arg is None:
+ raise PydanticUserError(
+ f'Unrecognized model_serializer function signature for {serializer} with `mode={mode}`:{sig}',
+ code='model-serializer-signature',
+ )
+ else:
+ return info_arg
+
+
+def _serializer_info_arg(mode: Literal['plain', 'wrap'], n_positional: int) -> bool | None:
+ if mode == 'plain':
+ if n_positional == 1:
+ # (input_value: Any, /) -> Any
+ return False
+ elif n_positional == 2:
+ # (model: Any, input_value: Any, /) -> Any
+ return True
+ else:
+ assert mode == 'wrap', f"invalid mode: {mode!r}, expected 'plain' or 'wrap'"
+ if n_positional == 2:
+ # (input_value: Any, serializer: SerializerFunctionWrapHandler, /) -> Any
+ return False
+ elif n_positional == 3:
+ # (input_value: Any, serializer: SerializerFunctionWrapHandler, info: SerializationInfo, /) -> Any
+ return True
+
+ return None
+
+
+AnyDecoratorCallable: TypeAlias = (
+ 'Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any], Callable[..., Any]]'
+)
+
+
+def is_instance_method_from_sig(function: AnyDecoratorCallable) -> bool:
+ """Whether the function is an instance method.
+
+    A function is considered an instance method if its first
+    parameter is named `self`.
+
+ Args:
+ function: The function to check.
+
+ Returns:
+ `True` if the function is an instance method, `False` otherwise.
+ """
+ sig = signature(unwrap_wrapped_function(function))
+ first = next(iter(sig.parameters.values()), None)
+ if first and first.name == 'self':
+ return True
+ return False
+
+
+def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable) -> Any:
+ """Apply the `@classmethod` decorator on the function.
+
+ Args:
+ function: The function to apply the decorator on.
+
+    Returns:
+        The function with the `@classmethod` decorator applied.
+ """
+ if not isinstance(
+ unwrap_wrapped_function(function, unwrap_class_static_method=False), classmethod
+ ) and _is_classmethod_from_sig(function):
+ return classmethod(function) # type: ignore[arg-type]
+ return function
+
+
+def _is_classmethod_from_sig(function: AnyDecoratorCallable) -> bool:
+ sig = signature(unwrap_wrapped_function(function))
+ first = next(iter(sig.parameters.values()), None)
+ if first and first.name == 'cls':
+ return True
+ return False
+
+
+def unwrap_wrapped_function(
+ func: Any,
+ *,
+ unwrap_partial: bool = True,
+ unwrap_class_static_method: bool = True,
+) -> Any:
+ """Recursively unwraps a wrapped function until the underlying function is reached.
+ This handles property, functools.partial, functools.partialmethod, staticmethod, and classmethod.
+
+ Args:
+ func: The function to unwrap.
+ unwrap_partial: If True (default), unwrap partial and partialmethod decorators.
+ unwrap_class_static_method: If True (default), also unwrap classmethod and staticmethod
+ decorators. If False, only unwrap partial and partialmethod decorators.
+
+ Returns:
+ The underlying function of the wrapped function.
+ """
+ # Define the types we want to check against as a single tuple.
+ unwrap_types = (
+ (property, cached_property)
+ + ((partial, partialmethod) if unwrap_partial else ())
+ + ((staticmethod, classmethod) if unwrap_class_static_method else ())
+ )
+
+ while isinstance(func, unwrap_types):
+ if unwrap_class_static_method and isinstance(func, (classmethod, staticmethod)):
+ func = func.__func__
+ elif isinstance(func, (partial, partialmethod)):
+ func = func.func
+ elif isinstance(func, property):
+ func = func.fget # arbitrary choice, convenient for computed fields
+ else:
+ # Make coverage happy as it can only get here in the last possible case
+ assert isinstance(func, cached_property)
+ func = func.func # type: ignore
+
+ return func
+
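+# Illustrative sketch (not part of the original module): nested wrappers are
+# peeled off recursively until the plain function is reached:
+#
+#     from functools import partial
+#
+#     def f(x):
+#         return x
+#
+#     unwrap_wrapped_function(classmethod(partial(f)))           # -> f
+#     unwrap_wrapped_function(property(f))                       # -> f (via `fget`)
+#     unwrap_wrapped_function(partial(f), unwrap_partial=False)  # -> the partial, unchanged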
+
+def get_function_return_type(
+ func: Any,
+ explicit_return_type: Any,
+ globalns: GlobalsNamespace | None = None,
+ localns: MappingNamespace | None = None,
+) -> Any:
+ """Get the function return type.
+
+ It gets the return type from the type annotation if `explicit_return_type` is `None`.
+ Otherwise, it returns `explicit_return_type`.
+
+ Args:
+ func: The function to get its return type.
+ explicit_return_type: The explicit return type.
+ globalns: The globals namespace to use during type annotation evaluation.
+ localns: The locals namespace to use during type annotation evaluation.
+
+ Returns:
+ The function return type.
+ """
+ if explicit_return_type is PydanticUndefined:
+ # try to get it from the type annotation
+ hints = get_function_type_hints(
+ unwrap_wrapped_function(func),
+ include_keys={'return'},
+ globalns=globalns,
+ localns=localns,
+ )
+ return hints.get('return', PydanticUndefined)
+ else:
+ return explicit_return_type
+
+
+def count_positional_required_params(sig: Signature) -> int:
+ """Get the number of positional (required) arguments of a signature.
+
+ This function should only be used to inspect signatures of validation and serialization functions.
+ The first argument (the value being serialized or validated) is counted as a required argument
+ even if a default value exists.
+
+ Returns:
+        The number of required positional parameters of the signature.
+ """
+ parameters = list(sig.parameters.values())
+ return sum(
+ 1
+ for param in parameters
+ if can_be_positional(param)
+ # First argument is the value being validated/serialized, and can have a default value
+ # (e.g. `float`, which has signature `(x=0, /)`). We assume other parameters (the info arg
+ # for instance) should be required, and thus without any default value.
+ and (param.default is Parameter.empty or param is parameters[0])
+ )
+
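+# Illustrative sketch (not part of the original module): the first parameter
+# counts even when it has a default, mirroring builtins like `float`,
+# whose signature is `(x=0, /)`:
+#
+#     count_positional_required_params(signature(lambda v, info: v))       # 2
+#     count_positional_required_params(signature(lambda v, info=None: v))  # 1
+#     count_positional_required_params(signature(float))                   # 1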
+
+def ensure_property(f: Any) -> Any:
+ """Ensure that a function is a `property` or `cached_property`, or is a valid descriptor.
+
+ Args:
+ f: The function to check.
+
+ Returns:
+ The function, or a `property` or `cached_property` instance wrapping the function.
+ """
+ if ismethoddescriptor(f) or isdatadescriptor(f):
+ return f
+ else:
+ return property(f)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_decorators_v1.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_decorators_v1.py
new file mode 100644
index 00000000..2dfa3f2a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_decorators_v1.py
@@ -0,0 +1,174 @@
+"""Logic for V1 validators, e.g. `@validator` and `@root_validator`."""
+
+from __future__ import annotations as _annotations
+
+from inspect import Parameter, signature
+from typing import Any, Dict, Tuple, Union, cast
+
+from pydantic_core import core_schema
+from typing_extensions import Protocol
+
+from ..errors import PydanticUserError
+from ._utils import can_be_positional
+
+
+class V1OnlyValueValidator(Protocol):
+    """A simple value-only validator, supported by both V1 and V2 validators."""
+
+ def __call__(self, __value: Any) -> Any: ...
+
+
+class V1ValidatorWithValues(Protocol):
+    """A validator with a `values` argument, supported by both V1 and V2 validators."""
+
+ def __call__(self, __value: Any, values: dict[str, Any]) -> Any: ...
+
+
+class V1ValidatorWithValuesKwOnly(Protocol):
+    """A validator with a keyword-only `values` argument, supported by both V1 and V2 validators."""
+
+ def __call__(self, __value: Any, *, values: dict[str, Any]) -> Any: ...
+
+
+class V1ValidatorWithKwargs(Protocol):
+    """A validator with a `kwargs` argument, supported by both V1 and V2 validators."""
+
+ def __call__(self, __value: Any, **kwargs: Any) -> Any: ...
+
+
+class V1ValidatorWithValuesAndKwargs(Protocol):
+    """A validator with `values` and `kwargs` arguments, supported by both V1 and V2 validators."""
+
+ def __call__(self, __value: Any, values: dict[str, Any], **kwargs: Any) -> Any: ...
+
+
+V1Validator = Union[
+ V1ValidatorWithValues, V1ValidatorWithValuesKwOnly, V1ValidatorWithKwargs, V1ValidatorWithValuesAndKwargs
+]
+
+
+def can_be_keyword(param: Parameter) -> bool:
+ return param.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)
+
+
+def make_generic_v1_field_validator(validator: V1Validator) -> core_schema.WithInfoValidatorFunction:
+ """Wrap a V1 style field validator for V2 compatibility.
+
+ Args:
+ validator: The V1 style field validator.
+
+ Returns:
+ A wrapped V2 style field validator.
+
+ Raises:
+ PydanticUserError: If the signature is not supported or the parameters are
+ not available in Pydantic V2.
+ """
+ sig = signature(validator)
+
+ needs_values_kw = False
+
+ for param_num, (param_name, parameter) in enumerate(sig.parameters.items()):
+ if can_be_keyword(parameter) and param_name in ('field', 'config'):
+ raise PydanticUserError(
+ 'The `field` and `config` parameters are not available in Pydantic V2, '
+ 'please use the `info` parameter instead.',
+ code='validator-field-config-info',
+ )
+ if parameter.kind is Parameter.VAR_KEYWORD:
+ needs_values_kw = True
+ elif can_be_keyword(parameter) and param_name == 'values':
+ needs_values_kw = True
+ elif can_be_positional(parameter) and param_num == 0:
+ # value
+ continue
+ elif parameter.default is Parameter.empty: # ignore params with defaults e.g. bound by functools.partial
+ raise PydanticUserError(
+ f'Unsupported signature for V1 style validator {validator}: {sig} is not supported.',
+ code='validator-v1-signature',
+ )
+
+ if needs_values_kw:
+ # (v, **kwargs), (v, values, **kwargs), (v, *, values, **kwargs) or (v, *, values)
+ val1 = cast(V1ValidatorWithValues, validator)
+
+ def wrapper1(value: Any, info: core_schema.ValidationInfo) -> Any:
+ return val1(value, values=info.data)
+
+ return wrapper1
+ else:
+ val2 = cast(V1OnlyValueValidator, validator)
+
+ def wrapper2(value: Any, _: core_schema.ValidationInfo) -> Any:
+ return val2(value)
+
+ return wrapper2
+
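+# Illustrative sketch (not part of the original module): a V1 validator taking
+# `values` is re-exposed with the V2 `(value, info)` calling convention:
+#
+#     def v1_check(value, values):
+#         return value
+#
+#     v2_func = make_generic_v1_field_validator(v1_check)
+#     # pydantic-core then calls `v2_func(value, info)`, and the wrapper
+#     # forwards `info.data` as the V1 `values` dict.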
+
+RootValidatorValues = Dict[str, Any]
+# technically tuple[model_dict, model_extra, fields_set] | tuple[dataclass_dict, init_vars]
+RootValidatorFieldsTuple = Tuple[Any, ...]
+
+
+class V1RootValidatorFunction(Protocol):
+    """A simple root validator, supported by both V1 and V2 validators."""
+
+ def __call__(self, __values: RootValidatorValues) -> RootValidatorValues: ...
+
+
+class V2CoreBeforeRootValidator(Protocol):
+ """V2 validator with mode='before'."""
+
+ def __call__(self, __values: RootValidatorValues, __info: core_schema.ValidationInfo) -> RootValidatorValues: ...
+
+
+class V2CoreAfterRootValidator(Protocol):
+ """V2 validator with mode='after'."""
+
+ def __call__(
+ self, __fields_tuple: RootValidatorFieldsTuple, __info: core_schema.ValidationInfo
+ ) -> RootValidatorFieldsTuple: ...
+
+
+def make_v1_generic_root_validator(
+ validator: V1RootValidatorFunction, pre: bool
+) -> V2CoreBeforeRootValidator | V2CoreAfterRootValidator:
+ """Wrap a V1 style root validator for V2 compatibility.
+
+ Args:
+        validator: The V1 style root validator.
+ pre: Whether the validator is a pre validator.
+
+ Returns:
+ A wrapped V2 style validator.
+ """
+ if pre is True:
+ # mode='before' for pydantic-core
+ def _wrapper1(values: RootValidatorValues, _: core_schema.ValidationInfo) -> RootValidatorValues:
+ return validator(values)
+
+ return _wrapper1
+
+ # mode='after' for pydantic-core
+ def _wrapper2(fields_tuple: RootValidatorFieldsTuple, _: core_schema.ValidationInfo) -> RootValidatorFieldsTuple:
+ if len(fields_tuple) == 2:
+ # dataclass, this is easy
+ values, init_vars = fields_tuple
+ values = validator(values)
+ return values, init_vars
+ else:
+ # ugly hack: to match v1 behaviour, we merge values and model_extra, then split them up based on fields
+ # afterwards
+ model_dict, model_extra, fields_set = fields_tuple
+ if model_extra:
+ fields = set(model_dict.keys())
+ model_dict.update(model_extra)
+ model_dict_new = validator(model_dict)
+ for k in list(model_dict_new.keys()):
+ if k not in fields:
+ model_extra[k] = model_dict_new.pop(k)
+ else:
+ model_dict_new = validator(model_dict)
+ return model_dict_new, model_extra, fields_set
+
+ return _wrapper2
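+
+
+# Illustrative sketch (not part of the original module): with `pre=True` the
+# wrapper simply drops the V2 `info` argument:
+#
+#     def v1_root(values):
+#         return values
+#
+#     v2_before = make_v1_generic_root_validator(v1_root, pre=True)
+#     # pydantic-core calls `v2_before(values, info)`; `info` is ignored.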
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_discriminated_union.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_discriminated_union.py
new file mode 100644
index 00000000..29a50a5a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_discriminated_union.py
@@ -0,0 +1,503 @@
+from __future__ import annotations as _annotations
+
+from typing import TYPE_CHECKING, Any, Hashable, Sequence
+
+from pydantic_core import CoreSchema, core_schema
+
+from ..errors import PydanticUserError
+from . import _core_utils
+from ._core_utils import (
+ CoreSchemaField,
+ collect_definitions,
+)
+
+if TYPE_CHECKING:
+ from ..types import Discriminator
+
+CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY = 'pydantic.internal.union_discriminator'
+
+
+class MissingDefinitionForUnionRef(Exception):
+ """Raised when applying a discriminated union discriminator to a schema
+    requires a definition that is not yet defined.
+ """
+
+ def __init__(self, ref: str) -> None:
+ self.ref = ref
+ super().__init__(f'Missing definition for ref {self.ref!r}')
+
+
+def set_discriminator_in_metadata(schema: CoreSchema, discriminator: Any) -> None:
+ schema.setdefault('metadata', {})
+ metadata = schema.get('metadata')
+ assert metadata is not None
+ metadata[CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY] = discriminator
+
+
+def apply_discriminators(schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
+ # We recursively walk through the `schema` passed to `apply_discriminators`, applying discriminators
+ # where necessary at each level. During this recursion, we allow references to be resolved from the definitions
+ # that are originally present on the original, outermost `schema`. Before `apply_discriminators` is called,
+ # `simplify_schema_references` is called on the schema (in the `clean_schema` function),
+ # which often puts the definitions in the outermost schema.
+ global_definitions: dict[str, CoreSchema] = collect_definitions(schema)
+
+ def inner(s: core_schema.CoreSchema, recurse: _core_utils.Recurse) -> core_schema.CoreSchema:
+ nonlocal global_definitions
+
+ s = recurse(s, inner)
+ if s['type'] == 'tagged-union':
+ return s
+
+ metadata = s.get('metadata', {})
+ discriminator = metadata.pop(CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY, None)
+ if discriminator is not None:
+ s = apply_discriminator(s, discriminator, global_definitions)
+ return s
+
+ return _core_utils.walk_core_schema(schema, inner, copy=False)
+
+
+def apply_discriminator(
+ schema: core_schema.CoreSchema,
+ discriminator: str | Discriminator,
+ definitions: dict[str, core_schema.CoreSchema] | None = None,
+) -> core_schema.CoreSchema:
+ """Applies the discriminator and returns a new core schema.
+
+ Args:
+ schema: The input schema.
+ discriminator: The name of the field which will serve as the discriminator.
+ definitions: A mapping of schema ref to schema.
+
+ Returns:
+ The new core schema.
+
+ Raises:
+ TypeError:
+            - If `discriminator` is used with an invalid union variant.
+            - If `discriminator` is used with a `Union` type with one variant.
+            - If a `discriminator` value is mapped to multiple choices.
+        MissingDefinitionForUnionRef:
+            If the definition for a ref is missing.
+        PydanticUserError:
+            - If a model in the union doesn't have a discriminator field.
+            - If the discriminator field has a non-string alias.
+            - If the discriminator fields have different aliases.
+            - If the discriminator field is not of type `Literal`.
+ """
+ from ..types import Discriminator
+
+ if isinstance(discriminator, Discriminator):
+ if isinstance(discriminator.discriminator, str):
+ discriminator = discriminator.discriminator
+ else:
+ return discriminator._convert_schema(schema)
+
+ return _ApplyInferredDiscriminator(discriminator, definitions or {}).apply(schema)
+
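+# Illustrative sketch (not part of the original module): in user code, this
+# conversion is what `Field(discriminator=...)` ultimately triggers:
+#
+#     from typing import Literal, Union
+#     from pydantic import BaseModel, Field
+#
+#     class Cat(BaseModel):
+#         kind: Literal['cat']
+#
+#     class Dog(BaseModel):
+#         kind: Literal['dog']
+#
+#     class Pet(BaseModel):
+#         animal: Union[Cat, Dog] = Field(discriminator='kind')
+#
+#     # The `animal` union schema is replaced by a tagged union keyed on `kind`.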
+
+class _ApplyInferredDiscriminator:
+ """This class is used to convert an input schema containing a union schema into one where that union is
+ replaced with a tagged-union, with all the associated debugging and performance benefits.
+
+ This is done by:
+ * Validating that the input schema is compatible with the provided discriminator
+ * Introspecting the schema to determine which discriminator values should map to which union choices
+ * Handling various edge cases such as 'definitions', 'default', 'nullable' schemas, and more
+
+ I have chosen to implement the conversion algorithm in this class, rather than a function,
+ to make it easier to maintain state while recursively walking the provided CoreSchema.
+ """
+
+ def __init__(self, discriminator: str, definitions: dict[str, core_schema.CoreSchema]):
+ # `discriminator` should be the name of the field which will serve as the discriminator.
+ # It must be the python name of the field, and *not* the field's alias. Note that as of now,
+ # all members of a discriminated union _must_ use a field with the same name as the discriminator.
+ # This may change if/when we expose a way to manually specify the TaggedUnionSchema's choices.
+ self.discriminator = discriminator
+
+ # `definitions` should contain a mapping of schema ref to schema for all schemas which might
+ # be referenced by some choice
+ self.definitions = definitions
+
+ # `_discriminator_alias` will hold the value, if present, of the alias for the discriminator
+ #
+ # Note: following the v1 implementation, we currently disallow the use of different aliases
+ # for different choices. This is not a limitation of pydantic_core, but if we try to handle
+ # this, the inference logic gets complicated very quickly, and could result in confusing
+ # debugging challenges for users making subtle mistakes.
+ #
+ # Rather than trying to do the most powerful inference possible, I think we should eventually
+ # expose a way to more-manually control the way the TaggedUnionSchema is constructed through
+ # the use of a new type which would be placed as an Annotation on the Union type. This would
+ # provide the full flexibility/power of pydantic_core's TaggedUnionSchema where necessary for
+ # more complex cases, without over-complicating the inference logic for the common cases.
+ self._discriminator_alias: str | None = None
+
+ # `_should_be_nullable` indicates whether the converted union has `None` as an allowed value.
+ # If `None` is an acceptable value of the (possibly-wrapped) union, we ignore it while
+ # constructing the TaggedUnionSchema, but set the `_should_be_nullable` attribute to True.
+ # Once we have constructed the TaggedUnionSchema, if `_should_be_nullable` is True, we ensure
+ # that the final schema gets wrapped as a NullableSchema. This has the same semantics on the
+ # python side, but resolves the issue that `None` cannot correspond to any discriminator values.
+ self._should_be_nullable = False
+
+ # `_is_nullable` is used to track if the final produced schema will definitely be nullable;
+ # we set it to True if the input schema is wrapped in a nullable schema that we know will be preserved
+ # as an indication that, even if None is discovered as one of the union choices, we will not need to wrap
+ # the final value in another nullable schema.
+ #
+ # This is more complicated than just checking for the final outermost schema having type 'nullable' thanks
+ # to the possible presence of other wrapper schemas such as DefinitionsSchema, WithDefaultSchema, etc.
+ self._is_nullable = False
+
+ # `_choices_to_handle` serves as a stack of choices to add to the tagged union. Initially, choices
+ # from the union in the wrapped schema will be appended to this list, and the recursive choice-handling
+ # algorithm may add more choices to this stack as (nested) unions are encountered.
+ self._choices_to_handle: list[core_schema.CoreSchema] = []
+
+ # `_tagged_union_choices` is built during the call to `apply`, and will hold the choices to be included
+ # in the output TaggedUnionSchema that will replace the union from the input schema
+ self._tagged_union_choices: dict[Hashable, core_schema.CoreSchema] = {}
+
+ # `_used` is changed to True after applying the discriminator to prevent accidental reuse
+ self._used = False
+
+ def apply(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
+ """Return a new CoreSchema based on `schema` that uses a tagged-union with the discriminator provided
+ to this class.
+
+ Args:
+ schema: The input schema.
+
+ Returns:
+ The new core schema.
+
+ Raises:
+ TypeError:
+                - If `discriminator` is used with an invalid union variant.
+                - If `discriminator` is used with a `Union` type with one variant.
+                - If a `discriminator` value is mapped to multiple choices.
+            MissingDefinitionForUnionRef:
+                If the definition for a ref is missing.
+            PydanticUserError:
+                - If a model in the union doesn't have a discriminator field.
+                - If the discriminator field has a non-string alias.
+                - If the discriminator fields have different aliases.
+                - If the discriminator field is not of type `Literal`.
+ """
+ assert not self._used
+ schema = self._apply_to_root(schema)
+ if self._should_be_nullable and not self._is_nullable:
+ schema = core_schema.nullable_schema(schema)
+ self._used = True
+ return schema
+
+ def _apply_to_root(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
+ """This method handles the outer-most stage of recursion over the input schema:
+ unwrapping nullable or definitions schemas, and calling the `_handle_choice`
+ method iteratively on the choices extracted (recursively) from the possibly-wrapped union.
+ """
+ if schema['type'] == 'nullable':
+ self._is_nullable = True
+ wrapped = self._apply_to_root(schema['schema'])
+ nullable_wrapper = schema.copy()
+ nullable_wrapper['schema'] = wrapped
+ return nullable_wrapper
+
+ if schema['type'] == 'definitions':
+ wrapped = self._apply_to_root(schema['schema'])
+ definitions_wrapper = schema.copy()
+ definitions_wrapper['schema'] = wrapped
+ return definitions_wrapper
+
+ if schema['type'] != 'union':
+ # If the schema is not a union, it probably means it just had a single member and
+ # was flattened by pydantic_core.
+ # However, it still may make sense to apply the discriminator to this schema,
+ # as a way to get discriminated-union-style error messages, so we allow this here.
+ schema = core_schema.union_schema([schema])
+
+ # Reverse the choices list before extending the stack so that they get handled in the order they occur
+ choices_schemas = [v[0] if isinstance(v, tuple) else v for v in schema['choices'][::-1]]
+ self._choices_to_handle.extend(choices_schemas)
+ while self._choices_to_handle:
+ choice = self._choices_to_handle.pop()
+ self._handle_choice(choice)
+
+ if self._discriminator_alias is not None and self._discriminator_alias != self.discriminator:
+ # * We need to annotate `discriminator` as a union here to handle both branches of this conditional
+ # * We need to annotate `discriminator` as list[list[str | int]] and not list[list[str]] due to the
+ # invariance of list, and because list[list[str | int]] is the type of the discriminator argument
+ # to tagged_union_schema below
+ # * See the docstring of pydantic_core.core_schema.tagged_union_schema for more details about how to
+ # interpret the value of the discriminator argument to tagged_union_schema. (The list[list[str]] here
+ # is the appropriate way to provide a list of fallback attributes to check for a discriminator value.)
+ discriminator: str | list[list[str | int]] = [[self.discriminator], [self._discriminator_alias]]
+ else:
+ discriminator = self.discriminator
+ return core_schema.tagged_union_schema(
+ choices=self._tagged_union_choices,
+ discriminator=discriminator,
+ custom_error_type=schema.get('custom_error_type'),
+ custom_error_message=schema.get('custom_error_message'),
+ custom_error_context=schema.get('custom_error_context'),
+ strict=False,
+ from_attributes=True,
+ ref=schema.get('ref'),
+ metadata=schema.get('metadata'),
+ serialization=schema.get('serialization'),
+ )
+
+ def _handle_choice(self, choice: core_schema.CoreSchema) -> None:
+ """This method handles the "middle" stage of recursion over the input schema.
+ Specifically, it is responsible for handling each choice of the outermost union
+ (and any "coalesced" choices obtained from inner unions).
+
+ Here, "handling" entails:
+ * Coalescing nested unions and compatible tagged-unions
+ * Tracking the presence of 'none' and 'nullable' schemas occurring as choices
+ * Validating that each allowed discriminator value maps to a unique choice
+ * Updating the _tagged_union_choices mapping that will ultimately be used to build the TaggedUnionSchema.
+ """
+ if choice['type'] == 'definition-ref':
+ if choice['schema_ref'] not in self.definitions:
+ raise MissingDefinitionForUnionRef(choice['schema_ref'])
+
+ if choice['type'] == 'none':
+ self._should_be_nullable = True
+ elif choice['type'] == 'definitions':
+ self._handle_choice(choice['schema'])
+ elif choice['type'] == 'nullable':
+ self._should_be_nullable = True
+ self._handle_choice(choice['schema']) # unwrap the nullable schema
+ elif choice['type'] == 'union':
+ # Reverse the choices list before extending the stack so that they get handled in the order they occur
+ choices_schemas = [v[0] if isinstance(v, tuple) else v for v in choice['choices'][::-1]]
+ self._choices_to_handle.extend(choices_schemas)
+ elif choice['type'] not in {
+ 'model',
+ 'typed-dict',
+ 'tagged-union',
+ 'lax-or-strict',
+ 'dataclass',
+ 'dataclass-args',
+ 'definition-ref',
+ } and not _core_utils.is_function_with_inner_schema(choice):
+ # We should eventually handle 'definition-ref' as well
+ raise TypeError(
+ f'{choice["type"]!r} is not a valid discriminated union variant;'
+ ' should be a `BaseModel` or `dataclass`'
+ )
+ else:
+ if choice['type'] == 'tagged-union' and self._is_discriminator_shared(choice):
+ # In this case, this inner tagged-union is compatible with the outer tagged-union,
+ # and its choices can be coalesced into the outer TaggedUnionSchema.
+ subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
+ # Reverse the choices list before extending the stack so that they get handled in the order they occur
+ self._choices_to_handle.extend(subchoices[::-1])
+ return
+
+ inferred_discriminator_values = self._infer_discriminator_values_for_choice(choice, source_name=None)
+ self._set_unique_choice_for_values(choice, inferred_discriminator_values)
+
+ def _is_discriminator_shared(self, choice: core_schema.TaggedUnionSchema) -> bool:
+ """This method returns a boolean indicating whether the discriminator for the `choice`
+ is the same as that being used for the outermost tagged union. This is used to
+ determine whether this TaggedUnionSchema choice should be "coalesced" into the top level,
+ or whether it should be treated as a separate (nested) choice.
+ """
+ inner_discriminator = choice['discriminator']
+ return inner_discriminator == self.discriminator or (
+ isinstance(inner_discriminator, list)
+ and (self.discriminator in inner_discriminator or [self.discriminator] in inner_discriminator)
+ )
+
+    def _infer_discriminator_values_for_choice(  # noqa: C901
+ self, choice: core_schema.CoreSchema, source_name: str | None
+ ) -> list[str | int]:
+ """This function recurses over `choice`, extracting all discriminator values that should map to this choice.
+
+        `source_name` is accepted for the purpose of producing useful error messages.
+ """
+ if choice['type'] == 'definitions':
+ return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
+ elif choice['type'] == 'function-plain':
+ raise TypeError(
+ f'{choice["type"]!r} is not a valid discriminated union variant;'
+ ' should be a `BaseModel` or `dataclass`'
+ )
+ elif _core_utils.is_function_with_inner_schema(choice):
+ return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
+ elif choice['type'] == 'lax-or-strict':
+ return sorted(
+ set(
+ self._infer_discriminator_values_for_choice(choice['lax_schema'], source_name=None)
+ + self._infer_discriminator_values_for_choice(choice['strict_schema'], source_name=None)
+ )
+ )
+
+ elif choice['type'] == 'tagged-union':
+ values: list[str | int] = []
+ # Ignore str/int "choices" since these are just references to other choices
+ subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
+ for subchoice in subchoices:
+ subchoice_values = self._infer_discriminator_values_for_choice(subchoice, source_name=None)
+ values.extend(subchoice_values)
+ return values
+
+ elif choice['type'] == 'union':
+ values = []
+ for subchoice in choice['choices']:
+ subchoice_schema = subchoice[0] if isinstance(subchoice, tuple) else subchoice
+ subchoice_values = self._infer_discriminator_values_for_choice(subchoice_schema, source_name=None)
+ values.extend(subchoice_values)
+ return values
+
+ elif choice['type'] == 'nullable':
+ self._should_be_nullable = True
+ return self._infer_discriminator_values_for_choice(choice['schema'], source_name=None)
+
+ elif choice['type'] == 'model':
+ return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)
+
+ elif choice['type'] == 'dataclass':
+ return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)
+
+ elif choice['type'] == 'model-fields':
+ return self._infer_discriminator_values_for_model_choice(choice, source_name=source_name)
+
+ elif choice['type'] == 'dataclass-args':
+ return self._infer_discriminator_values_for_dataclass_choice(choice, source_name=source_name)
+
+ elif choice['type'] == 'typed-dict':
+ return self._infer_discriminator_values_for_typed_dict_choice(choice, source_name=source_name)
+
+ elif choice['type'] == 'definition-ref':
+ schema_ref = choice['schema_ref']
+ if schema_ref not in self.definitions:
+ raise MissingDefinitionForUnionRef(schema_ref)
+ return self._infer_discriminator_values_for_choice(self.definitions[schema_ref], source_name=source_name)
+ else:
+ raise TypeError(
+ f'{choice["type"]!r} is not a valid discriminated union variant;'
+ ' should be a `BaseModel` or `dataclass`'
+ )
+
+ def _infer_discriminator_values_for_typed_dict_choice(
+ self, choice: core_schema.TypedDictSchema, source_name: str | None = None
+ ) -> list[str | int]:
+ """This method just extracts the _infer_discriminator_values_for_choice logic specific to TypedDictSchema
+ for the sake of readability.
+ """
+ source = 'TypedDict' if source_name is None else f'TypedDict {source_name!r}'
+ field = choice['fields'].get(self.discriminator)
+ if field is None:
+ raise PydanticUserError(
+ f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
+ )
+ return self._infer_discriminator_values_for_field(field, source)
+
+ def _infer_discriminator_values_for_model_choice(
+ self, choice: core_schema.ModelFieldsSchema, source_name: str | None = None
+ ) -> list[str | int]:
+ source = 'ModelFields' if source_name is None else f'Model {source_name!r}'
+ field = choice['fields'].get(self.discriminator)
+ if field is None:
+ raise PydanticUserError(
+ f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
+ )
+ return self._infer_discriminator_values_for_field(field, source)
+
+ def _infer_discriminator_values_for_dataclass_choice(
+ self, choice: core_schema.DataclassArgsSchema, source_name: str | None = None
+ ) -> list[str | int]:
+ source = 'DataclassArgs' if source_name is None else f'Dataclass {source_name!r}'
+ for field in choice['fields']:
+ if field['name'] == self.discriminator:
+ break
+ else:
+ raise PydanticUserError(
+ f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
+ )
+ return self._infer_discriminator_values_for_field(field, source)
+
+ def _infer_discriminator_values_for_field(self, field: CoreSchemaField, source: str) -> list[str | int]:
+ if field['type'] == 'computed-field':
+ # This should never occur as a discriminator, as it is only relevant to serialization
+ return []
+ alias = field.get('validation_alias', self.discriminator)
+ if not isinstance(alias, str):
+ raise PydanticUserError(
+ f'Alias {alias!r} is not supported in a discriminated union', code='discriminator-alias-type'
+ )
+ if self._discriminator_alias is None:
+ self._discriminator_alias = alias
+ elif self._discriminator_alias != alias:
+ raise PydanticUserError(
+ f'Aliases for discriminator {self.discriminator!r} must be the same '
+ f'(got {alias}, {self._discriminator_alias})',
+ code='discriminator-alias',
+ )
+ return self._infer_discriminator_values_for_inner_schema(field['schema'], source)
+
+ def _infer_discriminator_values_for_inner_schema(
+ self, schema: core_schema.CoreSchema, source: str
+ ) -> list[str | int]:
+ """When inferring discriminator values for a field, we typically extract the expected values from a literal
+ schema. This function does that, but also handles nested unions and defaults.
+ """
+ if schema['type'] == 'literal':
+ return schema['expected']
+
+ elif schema['type'] == 'union':
+ # Generally when multiple values are allowed they should be placed in a single `Literal`, but
+ # we add this case to handle the situation where a field is annotated as a `Union` of `Literal`s.
+ # For example, this lets us handle `Union[Literal['key'], Union[Literal['Key'], Literal['KEY']]]`
+ values: list[Any] = []
+ for choice in schema['choices']:
+ choice_schema = choice[0] if isinstance(choice, tuple) else choice
+ choice_values = self._infer_discriminator_values_for_inner_schema(choice_schema, source)
+ values.extend(choice_values)
+ return values
+
+ elif schema['type'] == 'default':
+ # This will happen if the field has a default value; we ignore it while extracting the discriminator values
+ return self._infer_discriminator_values_for_inner_schema(schema['schema'], source)
+
+ elif schema['type'] == 'function-after':
+ # After validators don't affect the discriminator values
+ return self._infer_discriminator_values_for_inner_schema(schema['schema'], source)
+
+ elif schema['type'] in {'function-before', 'function-wrap', 'function-plain'}:
+ validator_type = repr(schema['type'].split('-')[1])
+ raise PydanticUserError(
+ f'Cannot use a mode={validator_type} validator in the'
+ f' discriminator field {self.discriminator!r} of {source}',
+ code='discriminator-validator',
+ )
+
+ else:
+ raise PydanticUserError(
+ f'{source} needs field {self.discriminator!r} to be of type `Literal`',
+ code='discriminator-needs-literal',
+ )
+
+ def _set_unique_choice_for_values(self, choice: core_schema.CoreSchema, values: Sequence[str | int]) -> None:
+        """This method updates `self._tagged_union_choices` so that all provided (discriminator) `values` map to the
+ provided `choice`, validating that none of these values already map to another (different) choice.
+ """
+ for discriminator_value in values:
+ if discriminator_value in self._tagged_union_choices:
+ # It is okay if `value` is already in tagged_union_choices as long as it maps to the same value.
+ # Because tagged_union_choices may map values to other values, we need to walk the choices dict
+ # until we get to a "real" choice, and confirm that is equal to the one assigned.
+ existing_choice = self._tagged_union_choices[discriminator_value]
+ if existing_choice != choice:
+ raise TypeError(
+ f'Value {discriminator_value!r} for discriminator '
+ f'{self.discriminator!r} mapped to multiple choices'
+ )
+ else:
+ self._tagged_union_choices[discriminator_value] = choice
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_docs_extraction.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_docs_extraction.py
new file mode 100644
index 00000000..685a6d06
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_docs_extraction.py
@@ -0,0 +1,108 @@
+"""Utilities related to attribute docstring extraction."""
+
+from __future__ import annotations
+
+import ast
+import inspect
+import textwrap
+from typing import Any
+
+
+class DocstringVisitor(ast.NodeVisitor):
+ def __init__(self) -> None:
+ super().__init__()
+
+ self.target: str | None = None
+ self.attrs: dict[str, str] = {}
+ self.previous_node_type: type[ast.AST] | None = None
+
+ def visit(self, node: ast.AST) -> Any:
+ node_result = super().visit(node)
+ self.previous_node_type = type(node)
+ return node_result
+
+ def visit_AnnAssign(self, node: ast.AnnAssign) -> Any:
+ if isinstance(node.target, ast.Name):
+ self.target = node.target.id
+
+ def visit_Expr(self, node: ast.Expr) -> Any:
+ if (
+ isinstance(node.value, ast.Constant)
+ and isinstance(node.value.value, str)
+ and self.previous_node_type is ast.AnnAssign
+ ):
+ docstring = inspect.cleandoc(node.value.value)
+ if self.target:
+ self.attrs[self.target] = docstring
+ self.target = None
+
+
+def _dedent_source_lines(source: list[str]) -> str:
+ # Required for nested class definitions, e.g. in a function block
+ dedent_source = textwrap.dedent(''.join(source))
+ if dedent_source.startswith((' ', '\t')):
+ # We are in the case where there's a dedented (usually multiline) string
+ # at a lower indentation level than the class itself. We wrap our class
+ # in a function as a workaround.
+ dedent_source = f'def dedent_workaround():\n{dedent_source}'
+ return dedent_source
+
+
+def _extract_source_from_frame(cls: type[Any]) -> list[str] | None:
+ frame = inspect.currentframe()
+
+ while frame:
+ if inspect.getmodule(frame) is inspect.getmodule(cls):
+ lnum = frame.f_lineno
+ try:
+ lines, _ = inspect.findsource(frame)
+ except OSError:
+ # Source can't be retrieved (maybe because running in an interactive terminal),
+ # we don't want to error here.
+ pass
+ else:
+ block_lines = inspect.getblock(lines[lnum - 1 :])
+ dedent_source = _dedent_source_lines(block_lines)
+ try:
+ block_tree = ast.parse(dedent_source)
+ except SyntaxError:
+ pass
+ else:
+ stmt = block_tree.body[0]
+ if isinstance(stmt, ast.FunctionDef) and stmt.name == 'dedent_workaround':
+ # `_dedent_source_lines` wrapped the class around the workaround function
+ stmt = stmt.body[0]
+ if isinstance(stmt, ast.ClassDef) and stmt.name == cls.__name__:
+ return block_lines
+
+ frame = frame.f_back
+
+
+def extract_docstrings_from_cls(cls: type[Any], use_inspect: bool = False) -> dict[str, str]:
+ """Map model attributes and their corresponding docstring.
+
+ Args:
+ cls: The class of the Pydantic model to inspect.
+ use_inspect: Whether to skip usage of frames to find the object and use
+ the `inspect` module instead.
+
+ Returns:
+ A mapping containing attribute names and their corresponding docstring.
+ """
+ if use_inspect:
+ # Might not work as expected if two classes have the same name in the same source file.
+ try:
+ source, _ = inspect.getsourcelines(cls)
+ except OSError:
+ return {}
+ else:
+ source = _extract_source_from_frame(cls)
+
+ if not source:
+ return {}
+
+ dedent_source = _dedent_source_lines(source)
+
+ visitor = DocstringVisitor()
+ visitor.visit(ast.parse(dedent_source))
+ return visitor.attrs
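
For orientation (an editorial sketch, not part of this diff): the visitor pairs each annotated assignment with the string literal that immediately follows it. Calling the internal helper directly with `use_inspect=True` (the class name is invented; this must run from a real source file so `inspect` can locate it):

    from pydantic._internal._docs_extraction import extract_docstrings_from_cls

    class Point:
        x: int
        """Horizontal coordinate."""
        y: int
        """Vertical coordinate."""

    # Expected: {'x': 'Horizontal coordinate.', 'y': 'Vertical coordinate.'}
    print(extract_docstrings_from_cls(Point, use_inspect=True))
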
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_fields.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_fields.py
new file mode 100644
index 00000000..5c760abc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_fields.py
@@ -0,0 +1,392 @@
+"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`."""
+
+from __future__ import annotations as _annotations
+
+import dataclasses
+import warnings
+from copy import copy
+from functools import lru_cache
+from inspect import Parameter, ismethoddescriptor, signature
+from typing import TYPE_CHECKING, Any, Callable, Pattern
+
+from pydantic_core import PydanticUndefined
+from typing_extensions import TypeIs
+
+from pydantic.errors import PydanticUserError
+
+from . import _typing_extra
+from ._config import ConfigWrapper
+from ._docs_extraction import extract_docstrings_from_cls
+from ._import_utils import import_cached_base_model, import_cached_field_info
+from ._namespace_utils import NsResolver
+from ._repr import Representation
+from ._utils import can_be_positional
+
+if TYPE_CHECKING:
+ from annotated_types import BaseMetadata
+
+ from ..fields import FieldInfo
+ from ..main import BaseModel
+ from ._dataclasses import StandardDataclass
+ from ._decorators import DecoratorInfos
+
+
+class PydanticMetadata(Representation):
+ """Base class for annotation markers like `Strict`."""
+
+ __slots__ = ()
+
+
+def pydantic_general_metadata(**metadata: Any) -> BaseMetadata:
+ """Create a new `_PydanticGeneralMetadata` class with the given metadata.
+
+ Args:
+ **metadata: The metadata to add.
+
+ Returns:
+ The new `_PydanticGeneralMetadata` class.
+ """
+ return _general_metadata_cls()(metadata) # type: ignore
+
+
+@lru_cache(maxsize=None)
+def _general_metadata_cls() -> type[BaseMetadata]:
+ """Do it this way to avoid importing `annotated_types` at import time."""
+ from annotated_types import BaseMetadata
+
+ class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata):
+ """Pydantic general metadata like `max_digits`."""
+
+ def __init__(self, metadata: Any):
+ self.__dict__ = metadata
+
+ return _PydanticGeneralMetadata # type: ignore
+
+
+def _update_fields_from_docstrings(cls: type[Any], fields: dict[str, FieldInfo], config_wrapper: ConfigWrapper) -> None:
+ if config_wrapper.use_attribute_docstrings:
+ fields_docs = extract_docstrings_from_cls(cls)
+ for ann_name, field_info in fields.items():
+ if field_info.description is None and ann_name in fields_docs:
+ field_info.description = fields_docs[ann_name]
+
+
+def collect_model_fields( # noqa: C901
+ cls: type[BaseModel],
+ bases: tuple[type[Any], ...],
+ config_wrapper: ConfigWrapper,
+ ns_resolver: NsResolver | None,
+ *,
+ typevars_map: dict[Any, Any] | None = None,
+) -> tuple[dict[str, FieldInfo], set[str]]:
+ """Collect the fields of a nascent pydantic model.
+
+ Also collect the names of any ClassVars present in the type hints.
+
+ The returned value is a tuple of two items: the fields dict, and the set of ClassVar names.
+
+ Args:
+ cls: BaseModel or dataclass.
+ bases: Parents of the class, generally `cls.__bases__`.
+ config_wrapper: The config wrapper instance.
+ ns_resolver: Namespace resolver to use when getting model annotations.
+ typevars_map: A dictionary mapping type variables to their concrete types.
+
+ Returns:
+ A tuple containing the fields dict and the set of ClassVar names.
+
+ Raises:
+ NameError:
+ - If there is a conflict between a field name and protected namespaces.
+ - If there is a field other than `root` in `RootModel`.
+ - If a field shadows an attribute in the parent model.
+ """
+ BaseModel = import_cached_base_model()
+ FieldInfo_ = import_cached_field_info()
+
+ parent_fields_lookup: dict[str, FieldInfo] = {}
+ for base in reversed(bases):
+ if model_fields := getattr(base, '__pydantic_fields__', None):
+ parent_fields_lookup.update(model_fields)
+
+ type_hints = _typing_extra.get_model_type_hints(cls, ns_resolver=ns_resolver)
+
+ # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
+ # annotations is only used for finding fields in parent classes
+ annotations = cls.__dict__.get('__annotations__', {})
+ fields: dict[str, FieldInfo] = {}
+
+ class_vars: set[str] = set()
+ for ann_name, (ann_type, evaluated) in type_hints.items():
+ if ann_name == 'model_config':
+ # We never want to treat `model_config` as a field
+ # Note: we may need to change this logic if/when we introduce a `BareModel` class with no
+ # protected namespaces (where `model_config` might be allowed as a field name)
+ continue
+
+ for protected_namespace in config_wrapper.protected_namespaces:
+ ns_violation: bool = False
+ if isinstance(protected_namespace, Pattern):
+ ns_violation = protected_namespace.match(ann_name) is not None
+ elif isinstance(protected_namespace, str):
+ ns_violation = ann_name.startswith(protected_namespace)
+
+ if ns_violation:
+ for b in bases:
+ if hasattr(b, ann_name):
+ if not (issubclass(b, BaseModel) and ann_name in getattr(b, '__pydantic_fields__', {})):
+ raise NameError(
+ f'Field "{ann_name}" conflicts with member {getattr(b, ann_name)}'
+ f' of protected namespace "{protected_namespace}".'
+ )
+ else:
+ valid_namespaces = ()
+ for pn in config_wrapper.protected_namespaces:
+ if isinstance(pn, Pattern):
+ if not pn.match(ann_name):
+ valid_namespaces += (f're.compile({pn.pattern})',)
+ else:
+ if not ann_name.startswith(pn):
+ valid_namespaces += (pn,)
+
+ warnings.warn(
+ f'Field "{ann_name}" in {cls.__name__} has conflict with protected namespace "{protected_namespace}".'
+ '\n\nYou may be able to resolve this warning by setting'
+ f" `model_config['protected_namespaces'] = {valid_namespaces}`.",
+ UserWarning,
+ )
+ if _typing_extra.is_classvar_annotation(ann_type):
+ class_vars.add(ann_name)
+ continue
+ if _is_finalvar_with_default_val(ann_type, getattr(cls, ann_name, PydanticUndefined)):
+ class_vars.add(ann_name)
+ continue
+ if not is_valid_field_name(ann_name):
+ continue
+ if cls.__pydantic_root_model__ and ann_name != 'root':
+ raise NameError(
+ f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`"
+ )
+
+ # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get
+ # "... shadows an attribute" warnings
+ generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
+ for base in bases:
+ dataclass_fields = {
+ field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
+ }
+ if hasattr(base, ann_name):
+ if base is generic_origin:
+ # Don't warn about "shadowing" of attributes in parametrized generics
+ continue
+
+ if ann_name in dataclass_fields:
+ # Don't warn when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
+ # on the class instance.
+ continue
+
+ if ann_name not in annotations:
+ # Don't warn when a field exists in a parent class but has not been defined in the current class
+ continue
+
+ warnings.warn(
+ f'Field name "{ann_name}" in "{cls.__qualname__}" shadows an attribute in parent '
+ f'"{base.__qualname__}"',
+ UserWarning,
+ )
+
+ try:
+ default = getattr(cls, ann_name, PydanticUndefined)
+ if default is PydanticUndefined:
+ raise AttributeError
+ except AttributeError:
+ if ann_name in annotations:
+ field_info = FieldInfo_.from_annotation(ann_type)
+ field_info.evaluated = evaluated
+ else:
+ # if the field has no default value and is not in __annotations__, it is
+ # defined in a base class and we can take it from there
+ if ann_name in parent_fields_lookup:
+ # The field was present on one of the (possibly multiple) base classes
+ # copy the field to make sure typevar substitutions don't cause issues with the base classes
+ field_info = copy(parent_fields_lookup[ann_name])
+ else:
+ # The field was not found on any base classes; this seems to be caused by fields not getting
+ # generated thanks to models not being fully defined while initializing recursive models.
+ # Nothing stops us from just creating a new FieldInfo for this type hint, so we do this.
+ field_info = FieldInfo_.from_annotation(ann_type)
+ field_info.evaluated = evaluated
+ else:
+ _warn_on_nested_alias_in_annotation(ann_type, ann_name)
+ if isinstance(default, FieldInfo_) and ismethoddescriptor(default.default):
+ # the `getattr` call above triggers a call to `__get__` for descriptors, so we do
+ # the same if the `= field(default=...)` form is used. Note that we only do this
+ # for method descriptors for now, we might want to extend this to any descriptor
+ # in the future (by simply checking for `hasattr(default.default, '__get__')`).
+ default.default = default.default.__get__(None, cls)
+
+ field_info = FieldInfo_.from_annotated_attribute(ann_type, default)
+ field_info.evaluated = evaluated
+ # attributes which are fields are removed from the class namespace:
+ # 1. To match the behaviour of annotation-only fields
+ # 2. To avoid false positives in the NameError check above
+ try:
+ delattr(cls, ann_name)
+ except AttributeError:
+ pass # indicates the attribute was on a parent class
+
+ # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
+ # to make sure the decorators have already been built for this exact class
+ decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
+ if ann_name in decorators.computed_fields:
+ raise ValueError("you can't override a field with a computed field")
+ fields[ann_name] = field_info
+
+ if typevars_map:
+ for field in fields.values():
+ field.apply_typevars_map(typevars_map)
+
+ _update_fields_from_docstrings(cls, fields, config_wrapper)
+ return fields, class_vars
+
+
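
A hedged illustration of the protected-namespace handling above (class name invented): a field matching a protected prefix only emits a `UserWarning`, unless it collides with an actual attribute on a base class, in which case a `NameError` is raised.

    from pydantic import BaseModel, ConfigDict

    class M(BaseModel):
        model_config = ConfigDict(protected_namespaces=('model_',))
        # No base class defines `model_id`, so this emits a UserWarning
        # suggesting a `protected_namespaces` value that would not conflict.
        model_id: int
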
+def _warn_on_nested_alias_in_annotation(ann_type: type[Any], ann_name: str) -> None:
+ FieldInfo = import_cached_field_info()
+
+ args = getattr(ann_type, '__args__', None)
+ if args:
+ for anno_arg in args:
+ if _typing_extra.is_annotated(anno_arg):
+ for anno_type_arg in _typing_extra.get_args(anno_arg):
+ if isinstance(anno_type_arg, FieldInfo) and anno_type_arg.alias is not None:
+ warnings.warn(
+ f'`alias` specification on field "{ann_name}" must be set on outermost annotation to take effect.',
+ UserWarning,
+ )
+ return
+
+
+def _is_finalvar_with_default_val(type_: type[Any], val: Any) -> bool:
+ FieldInfo = import_cached_field_info()
+
+ if not _typing_extra.is_finalvar(type_):
+ return False
+ elif val is PydanticUndefined:
+ return False
+ elif isinstance(val, FieldInfo) and (val.default is PydanticUndefined and val.default_factory is None):
+ return False
+ else:
+ return True
+
+
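
In other words (a sketch of the observable behavior, assuming current V2 semantics): a `Final` annotation with a default is treated as a class variable, while `Final` without a default is still collected as a (frozen) field.

    from typing import Final

    from pydantic import BaseModel

    class Settings(BaseModel):
        api_version: Final[str] = 'v1'  # default present -> class variable, not a field
        name: Final[str]                # no default -> collected as a frozen field

    s = Settings(name='prod')  # only `name` is a model field
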
+def collect_dataclass_fields(
+ cls: type[StandardDataclass],
+ *,
+ ns_resolver: NsResolver | None = None,
+ typevars_map: dict[Any, Any] | None = None,
+ config_wrapper: ConfigWrapper | None = None,
+) -> dict[str, FieldInfo]:
+ """Collect the fields of a dataclass.
+
+ Args:
+ cls: dataclass.
+ ns_resolver: Namespace resolver to use when getting dataclass annotations.
+ Defaults to an empty instance.
+ typevars_map: A dictionary mapping type variables to their concrete types.
+ config_wrapper: The config wrapper instance.
+
+ Returns:
+ The dataclass fields.
+ """
+ FieldInfo_ = import_cached_field_info()
+
+ fields: dict[str, FieldInfo] = {}
+ ns_resolver = ns_resolver or NsResolver()
+ dataclass_fields = cls.__dataclass_fields__
+
+ # The logic here is similar to `_typing_extra.get_cls_type_hints`,
+ # although we do it manually as stdlib dataclasses already have annotations
+ # collected in each class:
+ for base in reversed(cls.__mro__):
+ if not dataclasses.is_dataclass(base):
+ continue
+
+ with ns_resolver.push(base):
+ for ann_name, dataclass_field in dataclass_fields.items():
+ if ann_name not in base.__dict__.get('__annotations__', {}):
+ # `__dataclass_fields__` contains every field, even the ones from base classes.
+ # Only collect the ones defined on `base`.
+ continue
+
+ globalns, localns = ns_resolver.types_namespace
+ ann_type, _ = _typing_extra.try_eval_type(dataclass_field.type, globalns, localns)
+
+ if _typing_extra.is_classvar_annotation(ann_type):
+ continue
+
+ if (
+ not dataclass_field.init
+ and dataclass_field.default is dataclasses.MISSING
+ and dataclass_field.default_factory is dataclasses.MISSING
+ ):
+ # TODO: We should probably do something with this so that validate_assignment behaves properly
+ # Issue: https://github.com/pydantic/pydantic/issues/5470
+ continue
+
+ if isinstance(dataclass_field.default, FieldInfo_):
+ if dataclass_field.default.init_var:
+ if dataclass_field.default.init is False:
+ raise PydanticUserError(
+ f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.',
+ code='clashing-init-and-init-var',
+ )
+
+ # TODO: same note as above re validate_assignment
+ continue
+ field_info = FieldInfo_.from_annotated_attribute(ann_type, dataclass_field.default)
+ else:
+ field_info = FieldInfo_.from_annotated_attribute(ann_type, dataclass_field)
+
+ fields[ann_name] = field_info
+
+ if field_info.default is not PydanticUndefined and isinstance(
+ getattr(cls, ann_name, field_info), FieldInfo_
+ ):
+ # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo
+ setattr(cls, ann_name, field_info.default)
+
+ if typevars_map:
+ for field in fields.values():
+ # We don't pass any ns, as `field.annotation`
+ # was already evaluated. TODO: is this method relevant?
+ # Can't we just use `_generics.replace_types`?
+ field.apply_typevars_map(typevars_map)
+
+ if config_wrapper is not None:
+ _update_fields_from_docstrings(cls, fields, config_wrapper)
+
+ return fields
+
+
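
A small sketch of what this collection supports, via `pydantic.dataclasses` (names invented): stdlib `field(...)` defaults and plain defaults are both recognized.

    import dataclasses

    import pydantic

    @pydantic.dataclasses.dataclass
    class User:
        name: str
        tags: list[str] = dataclasses.field(default_factory=list)

    print(User(name='ana'))  # User(name='ana', tags=[])
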
+def is_valid_field_name(name: str) -> bool:
+ return not name.startswith('_')
+
+
+def is_valid_privateattr_name(name: str) -> bool:
+ return name.startswith('_') and not name.startswith('__')
+
+
+def takes_validated_data_argument(
+ default_factory: Callable[[], Any] | Callable[[dict[str, Any]], Any],
+) -> TypeIs[Callable[[dict[str, Any]], Any]]:
+ """Whether the provided default factory callable has a validated data parameter."""
+ try:
+ sig = signature(default_factory)
+ except (ValueError, TypeError):
+ # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
+ # In this case, we assume no data argument is present:
+ return False
+
+ parameters = list(sig.parameters.values())
+
+ return len(parameters) == 1 and can_be_positional(parameters[0]) and parameters[0].default is Parameter.empty
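
`takes_validated_data_argument` is what lets a `default_factory` optionally receive the already-validated data. A sketch, assuming a Pydantic version that supports this form:

    from pydantic import BaseModel, Field

    class User(BaseModel):
        name: str
        # A single positional parameter -> the factory receives the validated data dict.
        slug: str = Field(default_factory=lambda data: data['name'].lower())

    print(User(name='Ana').slug)  # 'ana'
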
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_forward_ref.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_forward_ref.py
new file mode 100644
index 00000000..231f81d1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_forward_ref.py
@@ -0,0 +1,23 @@
+from __future__ import annotations as _annotations
+
+from dataclasses import dataclass
+from typing import Union
+
+
+@dataclass
+class PydanticRecursiveRef:
+ type_ref: str
+
+ __name__ = 'PydanticRecursiveRef'
+ __hash__ = object.__hash__
+
+ def __call__(self) -> None:
+ """Defining __call__ is necessary for the `typing` module to let you use an instance of
+ this class as the result of resolving a standard ForwardRef.
+ """
+
+ def __or__(self, other):
+ return Union[self, other] # type: ignore
+
+ def __ror__(self, other):
+ return Union[other, self] # type: ignore
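
`__or__`/`__ror__` let the placeholder participate in unions while a recursive reference is still being resolved. One situation where it appears internally is a self-referencing generic model (sketch; the class name is invented):

    from typing import Generic, Optional, TypeVar

    from pydantic import BaseModel

    T = TypeVar('T')

    class Node(BaseModel, Generic[T]):
        value: T
        next: Optional['Node[T]'] = None

    chain = Node[int](value=1, next=Node[int](value=2))
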
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_generate_schema.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_generate_schema.py
new file mode 100644
index 00000000..4d4a6a63
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_generate_schema.py
@@ -0,0 +1,2522 @@
+"""Convert python types to pydantic-core schema."""
+
+from __future__ import annotations as _annotations
+
+import collections.abc
+import dataclasses
+import datetime
+import inspect
+import os
+import pathlib
+import re
+import sys
+import typing
+import warnings
+from contextlib import contextmanager
+from copy import copy, deepcopy
+from decimal import Decimal
+from enum import Enum
+from fractions import Fraction
+from functools import partial
+from inspect import Parameter, _ParameterKind, signature
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from itertools import chain
+from operator import attrgetter
+from types import FunctionType, LambdaType, MethodType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ Final,
+ ForwardRef,
+ Iterable,
+ Iterator,
+ Mapping,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+from uuid import UUID
+from warnings import warn
+
+import typing_extensions
+from pydantic_core import (
+ CoreSchema,
+ MultiHostUrl,
+ PydanticCustomError,
+ PydanticSerializationUnexpectedValue,
+ PydanticUndefined,
+ Url,
+ core_schema,
+ to_jsonable_python,
+)
+from typing_extensions import Literal, TypeAliasType, TypedDict, get_args, get_origin, is_typeddict
+
+from ..aliases import AliasChoices, AliasGenerator, AliasPath
+from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
+from ..config import ConfigDict, JsonDict, JsonEncoder, JsonSchemaExtraCallable
+from ..errors import PydanticSchemaGenerationError, PydanticUndefinedAnnotation, PydanticUserError
+from ..functional_validators import AfterValidator, BeforeValidator, FieldValidatorModes, PlainValidator, WrapValidator
+from ..json_schema import JsonSchemaValue
+from ..version import version_short
+from ..warnings import PydanticDeprecatedSince20
+from . import _core_utils, _decorators, _discriminated_union, _known_annotated_metadata, _typing_extra
+from ._config import ConfigWrapper, ConfigWrapperStack
+from ._core_metadata import update_core_metadata
+from ._core_utils import (
+ collect_invalid_schemas,
+ define_expected_missing_refs,
+ get_ref,
+ get_type_ref,
+ is_function_with_inner_schema,
+ is_list_like_schema_with_items_schema,
+ simplify_schema_references,
+ validate_core_schema,
+)
+from ._decorators import (
+ Decorator,
+ DecoratorInfos,
+ FieldSerializerDecoratorInfo,
+ FieldValidatorDecoratorInfo,
+ ModelSerializerDecoratorInfo,
+ ModelValidatorDecoratorInfo,
+ RootValidatorDecoratorInfo,
+ ValidatorDecoratorInfo,
+ get_attribute_from_bases,
+ inspect_field_serializer,
+ inspect_model_serializer,
+ inspect_validator,
+)
+from ._docs_extraction import extract_docstrings_from_cls
+from ._fields import collect_dataclass_fields, takes_validated_data_argument
+from ._forward_ref import PydanticRecursiveRef
+from ._generics import get_standard_typevars_map, has_instance_in_type, recursively_defined_type_refs, replace_types
+from ._import_utils import import_cached_base_model, import_cached_field_info
+from ._mock_val_ser import MockCoreSchema
+from ._namespace_utils import NamespacesTuple, NsResolver
+from ._schema_generation_shared import CallbackGetCoreSchemaHandler
+from ._utils import lenient_issubclass, smart_deepcopy
+
+if TYPE_CHECKING:
+ from ..fields import ComputedFieldInfo, FieldInfo
+ from ..main import BaseModel
+ from ..types import Discriminator
+ from ._dataclasses import StandardDataclass
+ from ._schema_generation_shared import GetJsonSchemaFunction
+
+_SUPPORTS_TYPEDDICT = sys.version_info >= (3, 12)
+
+FieldDecoratorInfo = Union[ValidatorDecoratorInfo, FieldValidatorDecoratorInfo, FieldSerializerDecoratorInfo]
+FieldDecoratorInfoType = TypeVar('FieldDecoratorInfoType', bound=FieldDecoratorInfo)
+AnyFieldDecorator = Union[
+ Decorator[ValidatorDecoratorInfo],
+ Decorator[FieldValidatorDecoratorInfo],
+ Decorator[FieldSerializerDecoratorInfo],
+]
+
+ModifyCoreSchemaWrapHandler = GetCoreSchemaHandler
+GetCoreSchemaFunction = Callable[[Any, ModifyCoreSchemaWrapHandler], core_schema.CoreSchema]
+
+TUPLE_TYPES: list[type] = [tuple, typing.Tuple]
+LIST_TYPES: list[type] = [list, typing.List, collections.abc.MutableSequence]
+SET_TYPES: list[type] = [set, typing.Set, collections.abc.MutableSet]
+FROZEN_SET_TYPES: list[type] = [frozenset, typing.FrozenSet, collections.abc.Set]
+DICT_TYPES: list[type] = [dict, typing.Dict]
+IP_TYPES: list[type] = [IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network]
+SEQUENCE_TYPES: list[type] = [typing.Sequence, collections.abc.Sequence]
+PATH_TYPES: list[type] = [
+ os.PathLike,
+ pathlib.Path,
+ pathlib.PurePath,
+ pathlib.PosixPath,
+ pathlib.PurePosixPath,
+ pathlib.PureWindowsPath,
+]
+MAPPING_TYPES = [
+ typing.Mapping,
+ typing.MutableMapping,
+ collections.abc.Mapping,
+ collections.abc.MutableMapping,
+ collections.OrderedDict,
+ typing_extensions.OrderedDict,
+ typing.DefaultDict,
+ collections.defaultdict,
+ collections.Counter,
+ typing.Counter,
+]
+DEQUE_TYPES: list[type] = [collections.deque, typing.Deque]
+
+# Note: This does not play very well with type checkers. For example,
+# `a: LambdaType = lambda x: x` will be flagged as a type error by Pyright.
+ValidateCallSupportedTypes = Union[
+ LambdaType,
+ FunctionType,
+ MethodType,
+ partial,
+]
+
+VALIDATE_CALL_SUPPORTED_TYPES = get_args(ValidateCallSupportedTypes)
+
+_mode_to_validator: dict[
+ FieldValidatorModes, type[BeforeValidator | AfterValidator | PlainValidator | WrapValidator]
+] = {'before': BeforeValidator, 'after': AfterValidator, 'plain': PlainValidator, 'wrap': WrapValidator}
+
+
+def check_validator_fields_against_field_name(
+ info: FieldDecoratorInfo,
+ field: str,
+) -> bool:
+ """Check if field name is in validator fields.
+
+ Args:
+ info: The field info.
+ field: The field name to check.
+
+ Returns:
+ `True` if field name is in validator fields, `False` otherwise.
+ """
+ if '*' in info.fields:
+ return True
+ for v_field_name in info.fields:
+ if v_field_name == field:
+ return True
+ return False
+
+
+def check_decorator_fields_exist(decorators: Iterable[AnyFieldDecorator], fields: Iterable[str]) -> None:
+ """Check if the defined fields in decorators exist in `fields` param.
+
+ It ignores the check for a decorator if the decorator has `*` as field or `check_fields=False`.
+
+ Args:
+ decorators: An iterable of decorators.
+ fields: An iterable of field names.
+
+ Raises:
+ PydanticUserError: If one of the field names does not exist in `fields` param.
+ """
+ fields = set(fields)
+ for dec in decorators:
+ if '*' in dec.info.fields:
+ continue
+ if dec.info.check_fields is False:
+ continue
+ for field in dec.info.fields:
+ if field not in fields:
+ raise PydanticUserError(
+ f'Decorators defined with incorrect fields: {dec.cls_ref}.{dec.cls_var_name}'
+ " (use check_fields=False if you're inheriting from the model and intended this)",
+ code='decorator-missing-field',
+ )
+
+
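
The `check_fields=False` escape hatch mentioned in the error message is intended for validators defined on a base class for fields that only exist on subclasses (sketch, names invented):

    from pydantic import BaseModel, field_validator

    class Base(BaseModel):
        # Without check_fields=False this would raise PydanticUserError
        # (code='decorator-missing-field'), since Base has no `name` field.
        @field_validator('name', check_fields=False)
        @classmethod
        def strip_name(cls, v: str) -> str:
            return v.strip()

    class Child(Base):
        name: str  # the inherited validator applies here

    print(Child(name='  ana  ').name)  # 'ana'
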
+def filter_field_decorator_info_by_field(
+ validator_functions: Iterable[Decorator[FieldDecoratorInfoType]], field: str
+) -> list[Decorator[FieldDecoratorInfoType]]:
+ return [dec for dec in validator_functions if check_validator_fields_against_field_name(dec.info, field)]
+
+
+def apply_each_item_validators(
+ schema: core_schema.CoreSchema,
+ each_item_validators: list[Decorator[ValidatorDecoratorInfo]],
+ field_name: str | None,
+) -> core_schema.CoreSchema:
+ # This V1 compatibility shim should eventually be removed
+
+ # fail early if each_item_validators is empty
+ if not each_item_validators:
+ return schema
+
+ # push down any `each_item=True` validators
+ # note that this won't work for any Annotated types that get wrapped by a function validator
+ # but that's okay because that didn't exist in V1
+ if schema['type'] == 'nullable':
+ schema['schema'] = apply_each_item_validators(schema['schema'], each_item_validators, field_name)
+ return schema
+ elif schema['type'] == 'tuple':
+ if (variadic_item_index := schema.get('variadic_item_index')) is not None:
+ schema['items_schema'][variadic_item_index] = apply_validators(
+ schema['items_schema'][variadic_item_index],
+ each_item_validators,
+ field_name,
+ )
+ elif is_list_like_schema_with_items_schema(schema):
+ inner_schema = schema.get('items_schema', core_schema.any_schema())
+ schema['items_schema'] = apply_validators(inner_schema, each_item_validators, field_name)
+ elif schema['type'] == 'dict':
+ inner_schema = schema.get('values_schema', core_schema.any_schema())
+ schema['values_schema'] = apply_validators(inner_schema, each_item_validators, field_name)
+ else:
+ raise TypeError(
+ f"`@validator(..., each_item=True)` cannot be applied to fields with a schema of {schema['type']}"
+ )
+ return schema
+
+
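
The V1 pattern this shim keeps working, shown for illustration only (deprecated in V2; prefer an `Annotated` item type or a `field_validator` on the whole collection):

    from pydantic import BaseModel, validator

    class M(BaseModel):
        items: list[int]

        @validator('items', each_item=True)
        def check_positive(cls, v: int) -> int:
            assert v > 0, 'items must be positive'
            return v

    M(items=[1, 2, 3])    # ok
    # M(items=[1, -2])    # would raise a ValidationError
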
+def _extract_json_schema_info_from_field_info(
+ info: FieldInfo | ComputedFieldInfo,
+) -> tuple[JsonDict | None, JsonDict | JsonSchemaExtraCallable | None]:
+ json_schema_updates = {
+ 'title': info.title,
+ 'description': info.description,
+ 'deprecated': bool(info.deprecated) or info.deprecated == '' or None,
+ 'examples': to_jsonable_python(info.examples),
+ }
+ json_schema_updates = {k: v for k, v in json_schema_updates.items() if v is not None}
+ return (json_schema_updates or None, info.json_schema_extra)
+
+
+JsonEncoders = Dict[Type[Any], JsonEncoder]
+
+
+def _add_custom_serialization_from_json_encoders(
+ json_encoders: JsonEncoders | None, tp: Any, schema: CoreSchema
+) -> CoreSchema:
+ """Iterate over the json_encoders and add the first matching encoder to the schema.
+
+ Args:
+ json_encoders: A dictionary of types and their encoder functions.
+ tp: The type to check for a matching encoder.
+ schema: The schema to add the encoder to.
+ """
+ if not json_encoders:
+ return schema
+ if 'serialization' in schema:
+ return schema
+ # Check the class type and its superclasses for a matching encoder
+ # Decimal.__class__.__mro__ (and probably other cases) doesn't include Decimal itself
+ # if the type is a GenericAlias (e.g. from list[int]) we need to use __class__ instead of .__mro__
+ for base in (tp, *getattr(tp, '__mro__', tp.__class__.__mro__)[:-1]):
+ encoder = json_encoders.get(base)
+ if encoder is None:
+ continue
+
+ warnings.warn(
+ f'`json_encoders` is deprecated. See https://docs.pydantic.dev/{version_short()}/concepts/serialization/#custom-serializers for alternatives',
+ PydanticDeprecatedSince20,
+ )
+
+ # TODO: in theory we should check that the schema accepts a serialization key
+ schema['serialization'] = core_schema.plain_serializer_function_ser_schema(encoder, when_used='json')
+ return schema
+
+ return schema
+
+
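
What reaches this path is the deprecated `json_encoders` config (sketch; as the warning says, prefer `field_serializer`/`model_serializer`):

    from datetime import datetime

    from pydantic import BaseModel, ConfigDict

    class Event(BaseModel):
        model_config = ConfigDict(json_encoders={datetime: lambda dt: dt.isoformat()})
        when: datetime

    # The encoder only applies when dumping to JSON (when_used='json'):
    print(Event(when=datetime(2024, 1, 1)).model_dump_json())
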
+def _get_first_non_null(a: Any, b: Any) -> Any:
+ """Return the first argument if it is not None, otherwise return the second argument.
+
+ Use case: serialization_alias (argument a) and alias (argument b) are both defined, and serialization_alias is ''.
+ This function will return serialization_alias, which is the first argument, even though it is an empty string.
+ """
+ return a if a is not None else b
+
+
+class GenerateSchema:
+ """Generate core schema for a Pydantic model, dataclass and types like `str`, `datetime`, ... ."""
+
+ __slots__ = (
+ '_config_wrapper_stack',
+ '_ns_resolver',
+ '_typevars_map',
+ 'field_name_stack',
+ 'model_type_stack',
+ 'defs',
+ )
+
+ def __init__(
+ self,
+ config_wrapper: ConfigWrapper,
+ ns_resolver: NsResolver | None = None,
+ typevars_map: dict[Any, Any] | None = None,
+ ) -> None:
+ # we need a stack for recursing into nested models
+ self._config_wrapper_stack = ConfigWrapperStack(config_wrapper)
+ self._ns_resolver = ns_resolver or NsResolver()
+ self._typevars_map = typevars_map
+ self.field_name_stack = _FieldNameStack()
+ self.model_type_stack = _ModelTypeStack()
+ self.defs = _Definitions()
+
+ def __init_subclass__(cls) -> None:
+ super().__init_subclass__()
+ warnings.warn(
+ 'Subclassing `GenerateSchema` is not supported. The API is highly subject to change in minor versions.',
+ UserWarning,
+ stacklevel=2,
+ )
+
+ @property
+ def _config_wrapper(self) -> ConfigWrapper:
+ return self._config_wrapper_stack.tail
+
+ @property
+ def _types_namespace(self) -> NamespacesTuple:
+ return self._ns_resolver.types_namespace
+
+ @property
+ def _arbitrary_types(self) -> bool:
+ return self._config_wrapper.arbitrary_types_allowed
+
+ # the following methods can be overridden but should be considered
+ # unstable / private APIs
+ def _list_schema(self, items_type: Any) -> CoreSchema:
+ return core_schema.list_schema(self.generate_schema(items_type))
+
+ def _dict_schema(self, keys_type: Any, values_type: Any) -> CoreSchema:
+ return core_schema.dict_schema(self.generate_schema(keys_type), self.generate_schema(values_type))
+
+ def _set_schema(self, items_type: Any) -> CoreSchema:
+ return core_schema.set_schema(self.generate_schema(items_type))
+
+ def _frozenset_schema(self, items_type: Any) -> CoreSchema:
+ return core_schema.frozenset_schema(self.generate_schema(items_type))
+
+ def _enum_schema(self, enum_type: type[Enum]) -> CoreSchema:
+ cases: list[Any] = list(enum_type.__members__.values())
+
+ enum_ref = get_type_ref(enum_type)
+ description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__)
+ if (
+ description == 'An enumeration.'
+ ): # This is the default value provided by enum.EnumMeta.__new__; don't use it
+ description = None
+ js_updates = {'title': enum_type.__name__, 'description': description}
+ js_updates = {k: v for k, v in js_updates.items() if v is not None}
+
+ sub_type: Literal['str', 'int', 'float'] | None = None
+ if issubclass(enum_type, int):
+ sub_type = 'int'
+ value_ser_type: core_schema.SerSchema = core_schema.simple_ser_schema('int')
+ elif issubclass(enum_type, str):
+ # this handles `StrEnum` (3.11 only), and also `Foobar(str, Enum)`
+ sub_type = 'str'
+ value_ser_type = core_schema.simple_ser_schema('str')
+ elif issubclass(enum_type, float):
+ sub_type = 'float'
+ value_ser_type = core_schema.simple_ser_schema('float')
+ else:
+ # TODO this is an ugly hack, how do we trigger an Any schema for serialization?
+ value_ser_type = core_schema.plain_serializer_function_ser_schema(lambda x: x)
+
+ if cases:
+
+ def get_json_schema(schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
+ json_schema = handler(schema)
+ original_schema = handler.resolve_ref_schema(json_schema)
+ original_schema.update(js_updates)
+ return json_schema
+
+ # we don't want to add the `missing` hook to the schema if it's the default one
+ default_missing = getattr(enum_type._missing_, '__func__', None) is Enum._missing_.__func__ # pyright: ignore[reportFunctionMemberAccess]
+ enum_schema = core_schema.enum_schema(
+ enum_type,
+ cases,
+ sub_type=sub_type,
+ missing=None if default_missing else enum_type._missing_,
+ ref=enum_ref,
+ metadata={'pydantic_js_functions': [get_json_schema]},
+ )
+
+ if self._config_wrapper.use_enum_values:
+ enum_schema = core_schema.no_info_after_validator_function(
+ attrgetter('value'), enum_schema, serialization=value_ser_type
+ )
+
+ return enum_schema
+
+ else:
+
+ def get_json_schema_no_cases(_, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
+ json_schema = handler(core_schema.enum_schema(enum_type, cases, sub_type=sub_type, ref=enum_ref))
+ original_schema = handler.resolve_ref_schema(json_schema)
+ original_schema.update(js_updates)
+ return json_schema
+
+ # Use an isinstance check for enums with no cases.
+ # The most important use case for this is creating TypeVar bounds for generics that should
+ # be restricted to enums. This is more consistent than it might seem at first, since you can only
+ # subclass enum.Enum (or subclasses of enum.Enum) if all parent classes have no cases.
+ # We use the get_json_schema function when an Enum subclass has been declared with no cases
+ # so that we can still generate a valid json schema.
+ return core_schema.is_instance_schema(
+ enum_type,
+ metadata={'pydantic_js_functions': [get_json_schema_no_cases]},
+ )
+
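
The `use_enum_values` branch above swaps the validated member for its `.value` via an after-validator, e.g. (sketch):

    from enum import Enum

    from pydantic import BaseModel, ConfigDict

    class Color(Enum):
        RED = 'red'

    class M(BaseModel):
        model_config = ConfigDict(use_enum_values=True)
        color: Color

    print(M(color='red').color)  # 'red' (the value, not Color.RED)
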
+ def _ip_schema(self, tp: Any) -> CoreSchema:
+ from ._validators import IP_VALIDATOR_LOOKUP, IpType
+
+ ip_type_json_schema_format: dict[type[IpType], str] = {
+ IPv4Address: 'ipv4',
+ IPv4Network: 'ipv4network',
+ IPv4Interface: 'ipv4interface',
+ IPv6Address: 'ipv6',
+ IPv6Network: 'ipv6network',
+ IPv6Interface: 'ipv6interface',
+ }
+
+ def ser_ip(ip: Any, info: core_schema.SerializationInfo) -> str | IpType:
+ if not isinstance(ip, (tp, str)):
+ raise PydanticSerializationUnexpectedValue(
+ f"Expected `{tp}` but got `{type(ip)}` with value `'{ip}'` - serialized value may not be as expected."
+ )
+ if info.mode == 'python':
+ return ip
+ return str(ip)
+
+ return core_schema.lax_or_strict_schema(
+ lax_schema=core_schema.no_info_plain_validator_function(IP_VALIDATOR_LOOKUP[tp]),
+ strict_schema=core_schema.json_or_python_schema(
+ json_schema=core_schema.no_info_after_validator_function(tp, core_schema.str_schema()),
+ python_schema=core_schema.is_instance_schema(tp),
+ ),
+ serialization=core_schema.plain_serializer_function_ser_schema(ser_ip, info_arg=True, when_used='always'),
+ metadata={
+ 'pydantic_js_functions': [lambda _1, _2: {'type': 'string', 'format': ip_type_json_schema_format[tp]}]
+ },
+ )
+
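
Observable behavior of the lax/strict split (sketch using `TypeAdapter`): lax mode coerces strings, strict Python mode only accepts instances, and serialization goes through `ser_ip`:

    from ipaddress import IPv4Address

    from pydantic import TypeAdapter

    ta = TypeAdapter(IPv4Address)
    ta.validate_python('127.0.0.1')                           # lax: coerced from str
    ta.validate_python(IPv4Address('1.2.3.4'), strict=True)   # strict: instance only
    ta.dump_json(IPv4Address('1.2.3.4'))                      # b'"1.2.3.4"'
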
+ def _fraction_schema(self) -> CoreSchema:
+ """Support for [`fractions.Fraction`][fractions.Fraction]."""
+ from ._validators import fraction_validator
+
+ # TODO: note, this is a fairly common pattern, re lax / strict for attempted type coercion,
+ # can we use a helper function to reduce boilerplate?
+ return core_schema.lax_or_strict_schema(
+ lax_schema=core_schema.no_info_plain_validator_function(fraction_validator),
+ strict_schema=core_schema.json_or_python_schema(
+ json_schema=core_schema.no_info_plain_validator_function(fraction_validator),
+ python_schema=core_schema.is_instance_schema(Fraction),
+ ),
+ # use str serialization to guarantee round trip behavior
+ serialization=core_schema.to_string_ser_schema(when_used='always'),
+ metadata={'pydantic_js_functions': [lambda _1, _2: {'type': 'string', 'format': 'fraction'}]},
+ )
+
+ def _arbitrary_type_schema(self, tp: Any) -> CoreSchema:
+ if not isinstance(tp, type):
+ warn(
+ f'{tp!r} is not a Python type (it may be an instance of an object),'
+ ' Pydantic will allow any object with no validation since we cannot even'
+ ' enforce that the input is an instance of the given type.'
+ ' To get rid of this error wrap the type with `pydantic.SkipValidation`.',
+ UserWarning,
+ )
+ return core_schema.any_schema()
+ return core_schema.is_instance_schema(tp)
+
+ def _unknown_type_schema(self, obj: Any) -> CoreSchema:
+ raise PydanticSchemaGenerationError(
+ f'Unable to generate pydantic-core schema for {obj!r}. '
+ 'Set `arbitrary_types_allowed=True` in the model_config to ignore this error'
+ ' or implement `__get_pydantic_core_schema__` on your type to fully support it.'
+ '\n\nIf you got this error by calling handler(<some type>) within'
+ ' `__get_pydantic_core_schema__` then you likely need to call'
+ ' `handler.generate_schema(<some type>)` since we do not call'
+ ' `__get_pydantic_core_schema__` on `<some type>` otherwise to avoid infinite recursion.'
+ )
+
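
The two outcomes side by side (sketch): with `arbitrary_types_allowed=True` an unknown type gets a plain `isinstance` check, while without it class creation raises the `PydanticSchemaGenerationError` above.

    from pydantic import BaseModel, ConfigDict

    class Engine:  # a type Pydantic knows nothing about
        pass

    class Car(BaseModel):
        model_config = ConfigDict(arbitrary_types_allowed=True)
        engine: Engine  # validated with an isinstance check only

    Car(engine=Engine())  # ok; Car(engine='v8') would raise a ValidationError
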
+ def _apply_discriminator_to_union(
+ self, schema: CoreSchema, discriminator: str | Discriminator | None
+ ) -> CoreSchema:
+ if discriminator is None:
+ return schema
+ try:
+ return _discriminated_union.apply_discriminator(
+ schema,
+ discriminator,
+ )
+ except _discriminated_union.MissingDefinitionForUnionRef:
+ # defer until defs are resolved
+ _discriminated_union.set_discriminator_in_metadata(
+ schema,
+ discriminator,
+ )
+ return schema
+
+ class CollectedInvalid(Exception):
+ pass
+
+ def clean_schema(self, schema: CoreSchema) -> CoreSchema:
+ schema = self.collect_definitions(schema)
+ schema = simplify_schema_references(schema)
+ if collect_invalid_schemas(schema):
+ raise self.CollectedInvalid()
+ schema = _discriminated_union.apply_discriminators(schema)
+ schema = validate_core_schema(schema)
+ return schema
+
+ def collect_definitions(self, schema: CoreSchema) -> CoreSchema:
+ ref = cast('str | None', schema.get('ref', None))
+ if ref:
+ self.defs.definitions[ref] = schema
+ if 'ref' in schema:
+ schema = core_schema.definition_reference_schema(schema['ref'])
+ return core_schema.definitions_schema(
+ schema,
+ list(self.defs.definitions.values()),
+ )
+
+ def _add_js_function(self, metadata_schema: CoreSchema, js_function: Callable[..., Any]) -> None:
+ metadata = metadata_schema.get('metadata', {})
+ pydantic_js_functions = metadata.setdefault('pydantic_js_functions', [])
+ # because of how we generate core schemas for nested generic models
+ # we can end up adding `BaseModel.__get_pydantic_json_schema__` multiple times
+ # this check may fail to catch duplicates if the function is a `functools.partial`
+ # or similar; if it does miss one, the only consequence is a duplicated entry
+ if js_function not in pydantic_js_functions:
+ pydantic_js_functions.append(js_function)
+ metadata_schema['metadata'] = metadata
+
+ def generate_schema(
+ self,
+ obj: Any,
+ from_dunder_get_core_schema: bool = True,
+ ) -> core_schema.CoreSchema:
+ """Generate core schema.
+
+ Args:
+ obj: The object to generate core schema for.
+ from_dunder_get_core_schema: Whether to generate schema from either the
+ `__get_pydantic_core_schema__` function or `__pydantic_core_schema__` property.
+
+ Returns:
+ The generated core schema.
+
+ Raises:
+ PydanticUndefinedAnnotation:
+ If it is not possible to evaluate forward reference.
+ PydanticSchemaGenerationError:
+ If it is not possible to generate pydantic-core schema.
+ TypeError:
+ - If `alias_generator` returns a disallowed type (must be str, AliasPath or AliasChoices).
+ - If V1 style validator with `each_item=True` applied on a wrong field.
+ PydanticUserError:
+ - If `typing.TypedDict` is used instead of `typing_extensions.TypedDict` on Python < 3.12.
+ - If `__modify_schema__` method is used instead of `__get_pydantic_json_schema__`.
+ """
+ schema: CoreSchema | None = None
+
+ if from_dunder_get_core_schema:
+ from_property = self._generate_schema_from_property(obj, obj)
+ if from_property is not None:
+ schema = from_property
+
+ if schema is None:
+ schema = self._generate_schema_inner(obj)
+
+ metadata_js_function = _extract_get_pydantic_json_schema(obj, schema)
+ if metadata_js_function is not None:
+ metadata_schema = resolve_original_schema(schema, self.defs.definitions)
+ if metadata_schema:
+ self._add_js_function(metadata_schema, metadata_js_function)
+
+ schema = _add_custom_serialization_from_json_encoders(self._config_wrapper.json_encoders, obj, schema)
+
+ return schema
+
+ def _model_schema(self, cls: type[BaseModel]) -> core_schema.CoreSchema:
+ """Generate schema for a Pydantic model."""
+ with self.defs.get_schema_or_ref(cls) as (model_ref, maybe_schema):
+ if maybe_schema is not None:
+ return maybe_schema
+
+ fields = getattr(cls, '__pydantic_fields__', {})
+ decorators = cls.__pydantic_decorators__
+ computed_fields = decorators.computed_fields
+ check_decorator_fields_exist(
+ chain(
+ decorators.field_validators.values(),
+ decorators.field_serializers.values(),
+ decorators.validators.values(),
+ ),
+ {*fields.keys(), *computed_fields.keys()},
+ )
+ config_wrapper = ConfigWrapper(cls.model_config, check=False)
+ core_config = config_wrapper.core_config(title=cls.__name__)
+ model_validators = decorators.model_validators.values()
+
+ with self._config_wrapper_stack.push(config_wrapper), self._ns_resolver.push(cls):
+ extras_schema = None
+ if core_config.get('extra_fields_behavior') == 'allow':
+ assert cls.__mro__[0] is cls
+ assert cls.__mro__[-1] is object
+ for candidate_cls in cls.__mro__[:-1]:
+ extras_annotation = getattr(candidate_cls, '__annotations__', {}).get(
+ '__pydantic_extra__', None
+ )
+ if extras_annotation is not None:
+ if isinstance(extras_annotation, str):
+ extras_annotation = _typing_extra.eval_type_backport(
+ _typing_extra._make_forward_ref(
+ extras_annotation, is_argument=False, is_class=True
+ ),
+ *self._types_namespace,
+ )
+ tp = get_origin(extras_annotation)
+ if tp not in (Dict, dict):
+ raise PydanticSchemaGenerationError(
+ 'The type annotation for `__pydantic_extra__` must be `Dict[str, ...]`'
+ )
+ extra_items_type = self._get_args_resolving_forward_refs(
+ extras_annotation,
+ required=True,
+ )[1]
+ if not _typing_extra.is_any(extra_items_type):
+ extras_schema = self.generate_schema(extra_items_type)
+ break
+
+ generic_origin: type[BaseModel] | None = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
+
+ if cls.__pydantic_root_model__:
+ root_field = self._common_field_schema('root', fields['root'], decorators)
+ inner_schema = root_field['schema']
+ inner_schema = apply_model_validators(inner_schema, model_validators, 'inner')
+ model_schema = core_schema.model_schema(
+ cls,
+ inner_schema,
+ generic_origin=generic_origin,
+ custom_init=getattr(cls, '__pydantic_custom_init__', None),
+ root_model=True,
+ post_init=getattr(cls, '__pydantic_post_init__', None),
+ config=core_config,
+ ref=model_ref,
+ )
+ else:
+ fields_schema: core_schema.CoreSchema = core_schema.model_fields_schema(
+ {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
+ computed_fields=[
+ self._computed_field_schema(d, decorators.field_serializers)
+ for d in computed_fields.values()
+ ],
+ extras_schema=extras_schema,
+ model_name=cls.__name__,
+ )
+ inner_schema = apply_validators(fields_schema, decorators.root_validators.values(), None)
+ new_inner_schema = define_expected_missing_refs(inner_schema, recursively_defined_type_refs())
+ if new_inner_schema is not None:
+ inner_schema = new_inner_schema
+ inner_schema = apply_model_validators(inner_schema, model_validators, 'inner')
+
+ model_schema = core_schema.model_schema(
+ cls,
+ inner_schema,
+ generic_origin=generic_origin,
+ custom_init=getattr(cls, '__pydantic_custom_init__', None),
+ root_model=False,
+ post_init=getattr(cls, '__pydantic_post_init__', None),
+ config=core_config,
+ ref=model_ref,
+ )
+
+ schema = self._apply_model_serializers(model_schema, decorators.model_serializers.values())
+ schema = apply_model_validators(schema, model_validators, 'outer')
+ self.defs.definitions[model_ref] = schema
+ return core_schema.definition_reference_schema(model_ref)
+
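
The `__pydantic_extra__` annotation scan above is what powers typed extra fields (sketch): with `extra='allow'`, a `dict[str, <type>]` annotation constrains the values of any extra attributes.

    from pydantic import BaseModel, ConfigDict

    class M(BaseModel):
        model_config = ConfigDict(extra='allow')
        __pydantic_extra__: dict[str, int]

    m = M(x='1')
    print(m.__pydantic_extra__)  # {'x': 1} -- extras are validated as int
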
+ def _unpack_refs_defs(self, schema: CoreSchema) -> CoreSchema:
+ """Unpack all 'definitions' schemas into `GenerateSchema.defs.definitions`
+ and return the inner schema.
+ """
+ if schema['type'] == 'definitions':
+ definitions = self.defs.definitions
+ for s in schema['definitions']:
+ definitions[s['ref']] = s # type: ignore
+ return schema['schema']
+ return schema
+
+ def _resolve_self_type(self, obj: Any) -> Any:
+ obj = self.model_type_stack.get()
+ if obj is None:
+ raise PydanticUserError('`typing.Self` is invalid in this context', code='invalid-self-type')
+ return obj
+
+ def _generate_schema_from_property(self, obj: Any, source: Any) -> core_schema.CoreSchema | None:
+ """Try to generate schema from either the `__get_pydantic_core_schema__` function or
+ `__pydantic_core_schema__` property.
+
+ Note: `__get_pydantic_core_schema__` takes priority so it can
+ decide whether to use a `__pydantic_core_schema__` attribute, or generate a fresh schema.
+ """
+ # avoid calling `__get_pydantic_core_schema__` if we've already visited this object
+ if _typing_extra.is_self(obj):
+ obj = self._resolve_self_type(obj)
+ with self.defs.get_schema_or_ref(obj) as (_, maybe_schema):
+ if maybe_schema is not None:
+ return maybe_schema
+ if obj is source:
+ ref_mode = 'unpack'
+ else:
+ ref_mode = 'to-def'
+
+ schema: CoreSchema
+
+ if (get_schema := getattr(obj, '__get_pydantic_core_schema__', None)) is not None:
+ schema = get_schema(
+ source, CallbackGetCoreSchemaHandler(self._generate_schema_inner, self, ref_mode=ref_mode)
+ )
+ elif (
+ hasattr(obj, '__dict__')
+ # In some cases (e.g. a stdlib dataclass subclassing a Pydantic dataclass),
+ # doing an attribute access to get the schema will result in the parent schema
+ # being fetched. Thus, only look for the current obj's dict:
+ and (existing_schema := obj.__dict__.get('__pydantic_core_schema__')) is not None
+ and not isinstance(existing_schema, MockCoreSchema)
+ ):
+ schema = existing_schema
+ elif (validators := getattr(obj, '__get_validators__', None)) is not None:
+ from pydantic.v1 import BaseModel as BaseModelV1
+
+ if issubclass(obj, BaseModelV1):
+ warn(
+ f'Mixing V1 models and V2 models (or constructs, like `TypeAdapter`) is not supported. Please upgrade `{obj.__name__}` to V2.',
+ UserWarning,
+ )
+ else:
+ warn(
+ '`__get_validators__` is deprecated and will be removed, use `__get_pydantic_core_schema__` instead.',
+ PydanticDeprecatedSince20,
+ )
+ schema = core_schema.chain_schema([core_schema.with_info_plain_validator_function(v) for v in validators()])
+ else:
+ # we have no existing schema information on the property, exit early so that we can go generate a schema
+ return None
+
+ schema = self._unpack_refs_defs(schema)
+
+ if is_function_with_inner_schema(schema):
+ ref = schema['schema'].pop('ref', None) # pyright: ignore[reportCallIssue, reportArgumentType]
+ if ref:
+ schema['ref'] = ref
+ else:
+ ref = get_ref(schema)
+
+ if ref:
+ self.defs.definitions[ref] = schema
+ return core_schema.definition_reference_schema(ref)
+
+ return schema
+
+ def _resolve_forward_ref(self, obj: Any) -> Any:
+ # we assume that types_namespace has the target of forward references in its scope,
+ # but this could fail, for example, if calling Validator on an imported type which contains
+ # forward references to other types only defined in the module from which it was imported
+ # `Validator(SomeImportedTypeAliasWithAForwardReference)`
+ # or the equivalent for BaseModel
+ # class Model(BaseModel):
+ # x: SomeImportedTypeAliasWithAForwardReference
+ try:
+ obj = _typing_extra.eval_type_backport(obj, *self._types_namespace)
+ except NameError as e:
+ raise PydanticUndefinedAnnotation.from_name_error(e) from e
+
+ # if obj is still a ForwardRef, it means we can't evaluate it, raise PydanticUndefinedAnnotation
+ if isinstance(obj, ForwardRef):
+ raise PydanticUndefinedAnnotation(obj.__forward_arg__, f'Unable to evaluate forward reference {obj}')
+
+ if self._typevars_map:
+ obj = replace_types(obj, self._typevars_map)
+
+ return obj
+
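
When evaluation fails here, model building is deferred rather than aborted; the usual remedy is `model_rebuild()` once the missing name exists (sketch):

    from pydantic import BaseModel

    class M(BaseModel):
        x: 'Later'  # unresolvable at definition time; M is left partially built

    class Later(BaseModel):
        pass

    M.model_rebuild()  # re-evaluates the forward reference
    M(x=Later())       # now usable
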
+ @overload
+ def _get_args_resolving_forward_refs(self, obj: Any, required: Literal[True]) -> tuple[Any, ...]: ...
+
+ @overload
+ def _get_args_resolving_forward_refs(self, obj: Any) -> tuple[Any, ...] | None: ...
+
+ def _get_args_resolving_forward_refs(self, obj: Any, required: bool = False) -> tuple[Any, ...] | None:
+ args = get_args(obj)
+ if args:
+ if sys.version_info >= (3, 9):
+ from types import GenericAlias
+
+ if isinstance(obj, GenericAlias):
+ # PEP 585 generic aliases don't convert args to ForwardRefs, unlike `typing.List/Dict` etc.
+ args = (_typing_extra._make_forward_ref(a) if isinstance(a, str) else a for a in args)
+ args = tuple(self._resolve_forward_ref(a) if isinstance(a, ForwardRef) else a for a in args)
+ elif required: # pragma: no cover
+ raise TypeError(f'Expected {obj} to have generic parameters but it had none')
+ return args
+
+ def _get_first_arg_or_any(self, obj: Any) -> Any:
+ args = self._get_args_resolving_forward_refs(obj)
+ if not args:
+ return Any
+ return args[0]
+
+ def _get_first_two_args_or_any(self, obj: Any) -> tuple[Any, Any]:
+ args = self._get_args_resolving_forward_refs(obj)
+ if not args:
+ return (Any, Any)
+ if len(args) < 2:
+ origin = get_origin(obj)
+ raise TypeError(f'Expected two type arguments for {origin}, got 1')
+ return args[0], args[1]
+
+ def _generate_schema_inner(self, obj: Any) -> core_schema.CoreSchema:
+ if _typing_extra.is_annotated(obj):
+ return self._annotated_schema(obj)
+
+ if isinstance(obj, dict):
+ # we assume this is already a valid schema
+ return obj # type: ignore[return-value]
+
+ if isinstance(obj, str):
+ obj = ForwardRef(obj)
+
+ if isinstance(obj, ForwardRef):
+ return self.generate_schema(self._resolve_forward_ref(obj))
+
+ BaseModel = import_cached_base_model()
+
+ if lenient_issubclass(obj, BaseModel):
+ with self.model_type_stack.push(obj):
+ return self._model_schema(obj)
+
+ if isinstance(obj, PydanticRecursiveRef):
+ return core_schema.definition_reference_schema(schema_ref=obj.type_ref)
+
+ return self.match_type(obj)
+
+ def match_type(self, obj: Any) -> core_schema.CoreSchema: # noqa: C901
+ """Main mapping of types to schemas.
+
+ The general structure is a series of if statements starting with the simple cases
+ (non-generic primitive types) and then handling generics and other more complex cases.
+
+ Each case either generates a schema directly, calls into a public user-overridable method
+ (like `GenerateSchema.tuple_variable_schema`) or calls into a private method that handles some
+ boilerplate before calling into the user-facing method (e.g. `GenerateSchema._tuple_schema`).
+
+ The idea is that we'll evolve this into adding more and more user facing methods over time
+ as they get requested and we figure out what the right API for them is.
+ """
+ if obj is str:
+ return core_schema.str_schema()
+ elif obj is bytes:
+ return core_schema.bytes_schema()
+ elif obj is int:
+ return core_schema.int_schema()
+ elif obj is float:
+ return core_schema.float_schema()
+ elif obj is bool:
+ return core_schema.bool_schema()
+ elif obj is complex:
+ return core_schema.complex_schema()
+ elif _typing_extra.is_any(obj) or obj is object:
+ return core_schema.any_schema()
+ elif obj is datetime.date:
+ return core_schema.date_schema()
+ elif obj is datetime.datetime:
+ return core_schema.datetime_schema()
+ elif obj is datetime.time:
+ return core_schema.time_schema()
+ elif obj is datetime.timedelta:
+ return core_schema.timedelta_schema()
+ elif obj is Decimal:
+ return core_schema.decimal_schema()
+ elif obj is UUID:
+ return core_schema.uuid_schema()
+ elif obj is Url:
+ return core_schema.url_schema()
+ elif obj is Fraction:
+ return self._fraction_schema()
+ elif obj is MultiHostUrl:
+ return core_schema.multi_host_url_schema()
+ elif obj is None or obj is _typing_extra.NoneType:
+ return core_schema.none_schema()
+ elif obj in IP_TYPES:
+ return self._ip_schema(obj)
+ elif obj in TUPLE_TYPES:
+ return self._tuple_schema(obj)
+ elif obj in LIST_TYPES:
+ return self._list_schema(Any)
+ elif obj in SET_TYPES:
+ return self._set_schema(Any)
+ elif obj in FROZEN_SET_TYPES:
+ return self._frozenset_schema(Any)
+ elif obj in SEQUENCE_TYPES:
+ return self._sequence_schema(Any)
+ elif obj in DICT_TYPES:
+ return self._dict_schema(Any, Any)
+ elif _typing_extra.is_type_alias_type(obj):
+ return self._type_alias_type_schema(obj)
+ elif obj is type:
+ return self._type_schema()
+ elif _typing_extra.is_callable(obj):
+ return core_schema.callable_schema()
+ elif _typing_extra.is_literal(obj):
+ return self._literal_schema(obj)
+ elif is_typeddict(obj):
+ return self._typed_dict_schema(obj, None)
+ elif _typing_extra.is_namedtuple(obj):
+ return self._namedtuple_schema(obj, None)
+ elif _typing_extra.is_new_type(obj):
+ # NewType; can't use isinstance because it fails on Python < 3.10
+ return self.generate_schema(obj.__supertype__)
+ elif obj is re.Pattern:
+ return self._pattern_schema(obj)
+ elif _typing_extra.is_hashable(obj):
+ return self._hashable_schema()
+ elif isinstance(obj, typing.TypeVar):
+ return self._unsubstituted_typevar_schema(obj)
+ elif _typing_extra.is_finalvar(obj):
+ if obj is Final:
+ return core_schema.any_schema()
+ return self.generate_schema(
+ self._get_first_arg_or_any(obj),
+ )
+ elif isinstance(obj, VALIDATE_CALL_SUPPORTED_TYPES):
+ return self._call_schema(obj)
+ elif inspect.isclass(obj) and issubclass(obj, Enum):
+ return self._enum_schema(obj)
+ elif _typing_extra.is_zoneinfo_type(obj):
+ return self._zoneinfo_schema()
+
+ if dataclasses.is_dataclass(obj):
+ return self._dataclass_schema(obj, None)
+
+ origin = get_origin(obj)
+ if origin is not None:
+ return self._match_generic_type(obj, origin)
+
+ res = self._get_prepare_pydantic_annotations_for_known_type(obj, ())
+ if res is not None:
+ source_type, annotations = res
+ return self._apply_annotations(source_type, annotations)
+
+ if self._arbitrary_types:
+ return self._arbitrary_type_schema(obj)
+ return self._unknown_type_schema(obj)
+
+ def _match_generic_type(self, obj: Any, origin: Any) -> CoreSchema: # noqa: C901
+ # Need to handle generic dataclasses before looking for the schema properties because attribute accesses
+ # on _GenericAlias delegate to the origin type, so we lose the information about the concrete parametrization
+ # As a result, currently, there is no way to cache the schema for generic dataclasses. This may be possible
+ # to resolve by modifying the value returned by `Generic.__class_getitem__`, but that is a dangerous game.
+ if dataclasses.is_dataclass(origin):
+ return self._dataclass_schema(obj, origin) # pyright: ignore[reportArgumentType]
+ if _typing_extra.is_namedtuple(origin):
+ return self._namedtuple_schema(obj, origin)
+
+ from_property = self._generate_schema_from_property(origin, obj)
+ if from_property is not None:
+ return from_property
+
+ if _typing_extra.is_type_alias_type(origin):
+ return self._type_alias_type_schema(obj)
+ elif _typing_extra.origin_is_union(origin):
+ return self._union_schema(obj)
+ elif origin in TUPLE_TYPES:
+ return self._tuple_schema(obj)
+ elif origin in LIST_TYPES:
+ return self._list_schema(self._get_first_arg_or_any(obj))
+ elif origin in SET_TYPES:
+ return self._set_schema(self._get_first_arg_or_any(obj))
+ elif origin in FROZEN_SET_TYPES:
+ return self._frozenset_schema(self._get_first_arg_or_any(obj))
+ elif origin in DICT_TYPES:
+ return self._dict_schema(*self._get_first_two_args_or_any(obj))
+ elif is_typeddict(origin):
+ return self._typed_dict_schema(obj, origin)
+ elif origin in (typing.Type, type):
+ return self._subclass_schema(obj)
+ elif origin in SEQUENCE_TYPES:
+ return self._sequence_schema(self._get_first_arg_or_any(obj))
+ elif origin in {typing.Iterable, collections.abc.Iterable, typing.Generator, collections.abc.Generator}:
+ return self._iterable_schema(obj)
+ elif origin in (re.Pattern, typing.Pattern):
+ return self._pattern_schema(obj)
+
+ res = self._get_prepare_pydantic_annotations_for_known_type(obj, ())
+ if res is not None:
+ source_type, annotations = res
+ return self._apply_annotations(source_type, annotations)
+
+ if self._arbitrary_types:
+ return self._arbitrary_type_schema(origin)
+ return self._unknown_type_schema(obj)
+
+ def _generate_td_field_schema(
+ self,
+ name: str,
+ field_info: FieldInfo,
+ decorators: DecoratorInfos,
+ *,
+ required: bool = True,
+ ) -> core_schema.TypedDictField:
+ """Prepare a TypedDictField to represent a model or typeddict field."""
+ common_field = self._common_field_schema(name, field_info, decorators)
+ return core_schema.typed_dict_field(
+ common_field['schema'],
+ required=False if not field_info.is_required() else required,
+ serialization_exclude=common_field['serialization_exclude'],
+ validation_alias=common_field['validation_alias'],
+ serialization_alias=common_field['serialization_alias'],
+ metadata=common_field['metadata'],
+ )
+
+ def _generate_md_field_schema(
+ self,
+ name: str,
+ field_info: FieldInfo,
+ decorators: DecoratorInfos,
+ ) -> core_schema.ModelField:
+ """Prepare a ModelField to represent a model field."""
+ common_field = self._common_field_schema(name, field_info, decorators)
+ return core_schema.model_field(
+ common_field['schema'],
+ serialization_exclude=common_field['serialization_exclude'],
+ validation_alias=common_field['validation_alias'],
+ serialization_alias=common_field['serialization_alias'],
+ frozen=common_field['frozen'],
+ metadata=common_field['metadata'],
+ )
+
+ def _generate_dc_field_schema(
+ self,
+ name: str,
+ field_info: FieldInfo,
+ decorators: DecoratorInfos,
+ ) -> core_schema.DataclassField:
+ """Prepare a DataclassField to represent the parameter/field, of a dataclass."""
+ common_field = self._common_field_schema(name, field_info, decorators)
+ return core_schema.dataclass_field(
+ name,
+ common_field['schema'],
+ init=field_info.init,
+ init_only=field_info.init_var or None,
+ kw_only=None if field_info.kw_only else False,
+ serialization_exclude=common_field['serialization_exclude'],
+ validation_alias=common_field['validation_alias'],
+ serialization_alias=common_field['serialization_alias'],
+ frozen=common_field['frozen'],
+ metadata=common_field['metadata'],
+ )
+
+ @staticmethod
+ def _apply_alias_generator_to_field_info(
+ alias_generator: Callable[[str], str] | AliasGenerator, field_info: FieldInfo, field_name: str
+ ) -> None:
+ """Apply an alias_generator to aliases on a FieldInfo instance if appropriate.
+
+ Args:
+ alias_generator: A callable that takes a string and returns a string, or an AliasGenerator instance.
+ field_info: The FieldInfo instance to which the alias_generator is (maybe) applied.
+ field_name: The name of the field from which to generate the alias.
+ """
+ # Apply an alias_generator if
+ # 1. An alias is not specified
+ # 2. An alias is specified, but the priority is <= 1
+ if (
+ field_info.alias_priority is None
+ or field_info.alias_priority <= 1
+ or field_info.alias is None
+ or field_info.validation_alias is None
+ or field_info.serialization_alias is None
+ ):
+ alias, validation_alias, serialization_alias = None, None, None
+
+ if isinstance(alias_generator, AliasGenerator):
+ alias, validation_alias, serialization_alias = alias_generator.generate_aliases(field_name)
+ elif isinstance(alias_generator, Callable):
+ alias = alias_generator(field_name)
+ if not isinstance(alias, str):
+ raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}')
+
+            # if the priority is not set, we set it to 1,
+            # which supports the case where the alias_generator from a child class is used
+            # to generate an alias for a field in a parent class
+ if field_info.alias_priority is None or field_info.alias_priority <= 1:
+ field_info.alias_priority = 1
+
+ # if the priority is 1, then we set the aliases to the generated alias
+ if field_info.alias_priority == 1:
+ field_info.serialization_alias = _get_first_non_null(serialization_alias, alias)
+ field_info.validation_alias = _get_first_non_null(validation_alias, alias)
+ field_info.alias = alias
+
+ # if any of the aliases are not set, then we set them to the corresponding generated alias
+ if field_info.alias is None:
+ field_info.alias = alias
+ if field_info.serialization_alias is None:
+ field_info.serialization_alias = _get_first_non_null(serialization_alias, alias)
+ if field_info.validation_alias is None:
+ field_info.validation_alias = _get_first_non_null(validation_alias, alias)
+
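+    # Illustrative sketch (not part of the upstream module): how the precedence above
+    # plays out through the public API. A plain callable generator only fills in
+    # aliases the user left unset, because `alias_priority` gates the override:
+    #
+    #     from pydantic import BaseModel, ConfigDict, Field
+    #
+    #     class Model(BaseModel):
+    #         model_config = ConfigDict(alias_generator=str.upper)
+    #         a: int = Field(alias='explicit')  # alias_priority=2, so the generator is skipped
+    #         b: int                            # aliased to 'B' by the generator
+    #
+    #     Model(explicit=1, B=2)
+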
+ @staticmethod
+ def _apply_alias_generator_to_computed_field_info(
+ alias_generator: Callable[[str], str] | AliasGenerator,
+ computed_field_info: ComputedFieldInfo,
+ computed_field_name: str,
+ ):
+ """Apply an alias_generator to alias on a ComputedFieldInfo instance if appropriate.
+
+ Args:
+ alias_generator: A callable that takes a string and returns a string, or an AliasGenerator instance.
+ computed_field_info: The ComputedFieldInfo instance to which the alias_generator is (maybe) applied.
+ computed_field_name: The name of the computed field from which to generate the alias.
+ """
+ # Apply an alias_generator if
+ # 1. An alias is not specified
+ # 2. An alias is specified, but the priority is <= 1
+
+ if (
+ computed_field_info.alias_priority is None
+ or computed_field_info.alias_priority <= 1
+ or computed_field_info.alias is None
+ ):
+ alias, validation_alias, serialization_alias = None, None, None
+
+ if isinstance(alias_generator, AliasGenerator):
+ alias, validation_alias, serialization_alias = alias_generator.generate_aliases(computed_field_name)
+ elif isinstance(alias_generator, Callable):
+ alias = alias_generator(computed_field_name)
+ if not isinstance(alias, str):
+ raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}')
+
+            # if the priority is not set, we set it to 1,
+            # which supports the case where the alias_generator from a child class is used
+            # to generate an alias for a field in a parent class
+ if computed_field_info.alias_priority is None or computed_field_info.alias_priority <= 1:
+ computed_field_info.alias_priority = 1
+
+ # if the priority is 1, then we set the aliases to the generated alias
+ # note that we use the serialization_alias with priority over alias, as computed_field
+ # aliases are used for serialization only (not validation)
+ if computed_field_info.alias_priority == 1:
+ computed_field_info.alias = _get_first_non_null(serialization_alias, alias)
+
+ @staticmethod
+ def _apply_field_title_generator_to_field_info(
+ config_wrapper: ConfigWrapper, field_info: FieldInfo | ComputedFieldInfo, field_name: str
+ ) -> None:
+ """Apply a field_title_generator on a FieldInfo or ComputedFieldInfo instance if appropriate
+ Args:
+ config_wrapper: The config of the model
+ field_info: The FieldInfo or ComputedField instance to which the title_generator is (maybe) applied.
+ field_name: The name of the field from which to generate the title.
+ """
+ field_title_generator = field_info.field_title_generator or config_wrapper.field_title_generator
+
+ if field_title_generator is None:
+ return
+
+ if field_info.title is None:
+ title = field_title_generator(field_name, field_info) # type: ignore
+ if not isinstance(title, str):
+ raise TypeError(f'field_title_generator {field_title_generator} must return str, not {title.__class__}')
+
+ field_info.title = title
+
+ def _common_field_schema( # C901
+ self, name: str, field_info: FieldInfo, decorators: DecoratorInfos
+ ) -> _CommonField:
+ # Update FieldInfo annotation if appropriate:
+ FieldInfo = import_cached_field_info()
+ if not field_info.evaluated:
+ # TODO Can we use field_info.apply_typevars_map here?
+ try:
+ evaluated_type = _typing_extra.eval_type(field_info.annotation, *self._types_namespace)
+ except NameError as e:
+ raise PydanticUndefinedAnnotation.from_name_error(e) from e
+ evaluated_type = replace_types(evaluated_type, self._typevars_map)
+ field_info.evaluated = True
+ if not has_instance_in_type(evaluated_type, PydanticRecursiveRef):
+ new_field_info = FieldInfo.from_annotation(evaluated_type)
+ field_info.annotation = new_field_info.annotation
+
+ # Handle any field info attributes that may have been obtained from now-resolved annotations
+ for k, v in new_field_info._attributes_set.items():
+ # If an attribute is already set, it means it was set by assigning to a call to Field (or just a
+ # default value), and that should take the highest priority. So don't overwrite existing attributes.
+ # We skip over "attributes" that are present in the metadata_lookup dict because these won't
+ # actually end up as attributes of the `FieldInfo` instance.
+ if k not in field_info._attributes_set and k not in field_info.metadata_lookup:
+ setattr(field_info, k, v)
+
+ # Finally, ensure the field info also reflects all the `_attributes_set` that are actually metadata.
+ field_info.metadata = [*new_field_info.metadata, *field_info.metadata]
+
+ source_type, annotations = field_info.annotation, field_info.metadata
+
+ def set_discriminator(schema: CoreSchema) -> CoreSchema:
+ schema = self._apply_discriminator_to_union(schema, field_info.discriminator)
+ return schema
+
+ # Convert `@field_validator` decorators to `Before/After/Plain/WrapValidator` instances:
+ validators_from_decorators = []
+ for decorator in filter_field_decorator_info_by_field(decorators.field_validators.values(), name):
+ validators_from_decorators.append(_mode_to_validator[decorator.info.mode]._from_decorator(decorator))
+
+ with self.field_name_stack.push(name):
+ if field_info.discriminator is not None:
+ schema = self._apply_annotations(
+ source_type, annotations + validators_from_decorators, transform_inner_schema=set_discriminator
+ )
+ else:
+ schema = self._apply_annotations(
+ source_type,
+ annotations + validators_from_decorators,
+ )
+
+ # This V1 compatibility shim should eventually be removed
+ # push down any `each_item=True` validators
+ # note that this won't work for any Annotated types that get wrapped by a function validator
+ # but that's okay because that didn't exist in V1
+ this_field_validators = filter_field_decorator_info_by_field(decorators.validators.values(), name)
+ if _validators_require_validate_default(this_field_validators):
+ field_info.validate_default = True
+ each_item_validators = [v for v in this_field_validators if v.info.each_item is True]
+ this_field_validators = [v for v in this_field_validators if v not in each_item_validators]
+ schema = apply_each_item_validators(schema, each_item_validators, name)
+
+ schema = apply_validators(schema, this_field_validators, name)
+
+ # the default validator needs to go outside of any other validators
+ # so that it is the topmost validator for the field validator
+ # which uses it to check if the field has a default value or not
+ if not field_info.is_required():
+ schema = wrap_default(field_info, schema)
+
+ schema = self._apply_field_serializers(
+ schema, filter_field_decorator_info_by_field(decorators.field_serializers.values(), name)
+ )
+ self._apply_field_title_generator_to_field_info(self._config_wrapper, field_info, name)
+
+ pydantic_js_updates, pydantic_js_extra = _extract_json_schema_info_from_field_info(field_info)
+ core_metadata: dict[str, Any] = {}
+ update_core_metadata(
+ core_metadata, pydantic_js_updates=pydantic_js_updates, pydantic_js_extra=pydantic_js_extra
+ )
+
+ alias_generator = self._config_wrapper.alias_generator
+ if alias_generator is not None:
+ self._apply_alias_generator_to_field_info(alias_generator, field_info, name)
+
+ if isinstance(field_info.validation_alias, (AliasChoices, AliasPath)):
+ validation_alias = field_info.validation_alias.convert_to_aliases()
+ else:
+ validation_alias = field_info.validation_alias
+
+ return _common_field(
+ schema,
+ serialization_exclude=True if field_info.exclude else None,
+ validation_alias=validation_alias,
+ serialization_alias=field_info.serialization_alias,
+ frozen=field_info.frozen,
+ metadata=core_metadata,
+ )
+
+ def _union_schema(self, union_type: Any) -> core_schema.CoreSchema:
+ """Generate schema for a Union."""
+ args = self._get_args_resolving_forward_refs(union_type, required=True)
+ choices: list[CoreSchema] = []
+ nullable = False
+ for arg in args:
+ if arg is None or arg is _typing_extra.NoneType:
+ nullable = True
+ else:
+ choices.append(self.generate_schema(arg))
+
+ if len(choices) == 1:
+ s = choices[0]
+ else:
+ choices_with_tags: list[CoreSchema | tuple[CoreSchema, str]] = []
+ for choice in choices:
+ tag = choice.get('metadata', {}).get(_core_utils.TAGGED_UNION_TAG_KEY)
+ if tag is not None:
+ choices_with_tags.append((choice, tag))
+ else:
+ choices_with_tags.append(choice)
+ s = core_schema.union_schema(choices_with_tags)
+
+ if nullable:
+ s = core_schema.nullable_schema(s)
+ return s
+
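+    # Illustrative sketch (not part of the upstream module): `Optional[int]` is
+    # `Union[int, None]`, so the loop above strips `None` from the choices and the
+    # single remaining schema is wrapped in a nullable schema:
+    #
+    #     from typing import Optional
+    #     from pydantic import TypeAdapter
+    #
+    #     ta = TypeAdapter(Optional[int])
+    #     assert ta.validate_python(None) is None
+    #     assert ta.validate_python(1) == 1
+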
+ def _type_alias_type_schema(self, obj: TypeAliasType) -> CoreSchema:
+ with self.defs.get_schema_or_ref(obj) as (ref, maybe_schema):
+ if maybe_schema is not None:
+ return maybe_schema
+
+ origin: TypeAliasType = get_origin(obj) or obj
+ typevars_map = get_standard_typevars_map(obj)
+
+ with self._ns_resolver.push(origin):
+ try:
+ annotation = _typing_extra.eval_type(origin.__value__, *self._types_namespace)
+ except NameError as e:
+ raise PydanticUndefinedAnnotation.from_name_error(e) from e
+ annotation = replace_types(annotation, typevars_map)
+ schema = self.generate_schema(annotation)
+ assert schema['type'] != 'definitions'
+ schema['ref'] = ref # type: ignore
+ self.defs.definitions[ref] = schema
+ return core_schema.definition_reference_schema(ref)
+
+ def _literal_schema(self, literal_type: Any) -> CoreSchema:
+ """Generate schema for a Literal."""
+ expected = _typing_extra.literal_values(literal_type)
+ assert expected, f'literal "expected" cannot be empty, obj={literal_type}'
+ schema = core_schema.literal_schema(expected)
+
+ if self._config_wrapper.use_enum_values and any(isinstance(v, Enum) for v in expected):
+ schema = core_schema.no_info_after_validator_function(
+ lambda v: v.value if isinstance(v, Enum) else v, schema
+ )
+
+ return schema
+
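+    # Illustrative sketch (not part of the upstream module): with `use_enum_values=True`,
+    # the after-validator added above unwraps Enum members that appear in a Literal:
+    #
+    #     from enum import Enum
+    #     from typing import Literal
+    #     from pydantic import BaseModel, ConfigDict
+    #
+    #     class Color(Enum):
+    #         RED = 'red'
+    #
+    #     class Model(BaseModel):
+    #         model_config = ConfigDict(use_enum_values=True)
+    #         color: Literal[Color.RED]
+    #
+    #     assert Model(color=Color.RED).color == 'red'
+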
+ def _typed_dict_schema(self, typed_dict_cls: Any, origin: Any) -> core_schema.CoreSchema:
+ """Generate schema for a TypedDict.
+
+        It is not possible to track required/optional keys in a TypedDict without __required_keys__,
+        since TypedDict.__new__ erases the base classes (it replaces them with just `dict`)
+        and thus we cannot track usage of total=True/False.
+        __required_keys__ was added in Python 3.9
+        (https://github.com/miss-islington/cpython/blob/1e9939657dd1f8eb9f596f77c1084d2d351172fc/Doc/library/typing.rst?plain=1#L1546-L1548),
+        however it is buggy
+        (https://github.com/python/typing_extensions/blob/ac52ac5f2cb0e00e7988bae1e2a1b8257ac88d6d/src/typing_extensions.py#L657-L666).
+
+        On Python 3.11 (but not 3.12+), TypedDict does not preserve inheritance information.
+
+        Hence, to avoid creating validators that do not do what users expect, we only
+        support typing.TypedDict on Python >= 3.12, or typing_extensions.TypedDict on all versions.
+        """
+ FieldInfo = import_cached_field_info()
+
+ with self.model_type_stack.push(typed_dict_cls), self.defs.get_schema_or_ref(typed_dict_cls) as (
+ typed_dict_ref,
+ maybe_schema,
+ ):
+ if maybe_schema is not None:
+ return maybe_schema
+
+ typevars_map = get_standard_typevars_map(typed_dict_cls)
+ if origin is not None:
+ typed_dict_cls = origin
+
+ if not _SUPPORTS_TYPEDDICT and type(typed_dict_cls).__module__ == 'typing':
+ raise PydanticUserError(
+ 'Please use `typing_extensions.TypedDict` instead of `typing.TypedDict` on Python < 3.12.',
+ code='typed-dict-version',
+ )
+
+ try:
+ # if a typed dictionary class doesn't have config, we use the parent's config, hence a default of `None`
+ # see https://github.com/pydantic/pydantic/issues/10917
+ config: ConfigDict | None = get_attribute_from_bases(typed_dict_cls, '__pydantic_config__')
+ except AttributeError:
+ config = None
+
+ with self._config_wrapper_stack.push(config):
+ core_config = self._config_wrapper.core_config(title=typed_dict_cls.__name__)
+
+ required_keys: frozenset[str] = typed_dict_cls.__required_keys__
+
+ fields: dict[str, core_schema.TypedDictField] = {}
+
+ decorators = DecoratorInfos.build(typed_dict_cls)
+
+ if self._config_wrapper.use_attribute_docstrings:
+ field_docstrings = extract_docstrings_from_cls(typed_dict_cls, use_inspect=True)
+ else:
+ field_docstrings = None
+
+ try:
+ annotations = _typing_extra.get_cls_type_hints(typed_dict_cls, ns_resolver=self._ns_resolver)
+ except NameError as e:
+ raise PydanticUndefinedAnnotation.from_name_error(e) from e
+
+ for field_name, annotation in annotations.items():
+ annotation = replace_types(annotation, typevars_map)
+ required = field_name in required_keys
+
+ if _typing_extra.is_required(annotation):
+ required = True
+ annotation = self._get_args_resolving_forward_refs(
+ annotation,
+ required=True,
+ )[0]
+ elif _typing_extra.is_not_required(annotation):
+ required = False
+ annotation = self._get_args_resolving_forward_refs(
+ annotation,
+ required=True,
+ )[0]
+
+ field_info = FieldInfo.from_annotation(annotation)
+ if (
+ field_docstrings is not None
+ and field_info.description is None
+ and field_name in field_docstrings
+ ):
+ field_info.description = field_docstrings[field_name]
+ self._apply_field_title_generator_to_field_info(self._config_wrapper, field_info, field_name)
+ fields[field_name] = self._generate_td_field_schema(
+ field_name, field_info, decorators, required=required
+ )
+
+ td_schema = core_schema.typed_dict_schema(
+ fields,
+ cls=typed_dict_cls,
+ computed_fields=[
+ self._computed_field_schema(d, decorators.field_serializers)
+ for d in decorators.computed_fields.values()
+ ],
+ ref=typed_dict_ref,
+ config=core_config,
+ )
+
+ schema = self._apply_model_serializers(td_schema, decorators.model_serializers.values())
+ schema = apply_model_validators(schema, decorators.model_validators.values(), 'all')
+ self.defs.definitions[typed_dict_ref] = schema
+ return core_schema.definition_reference_schema(typed_dict_ref)
+
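+    # Illustrative sketch (not part of the upstream module): the Required/NotRequired
+    # handling above lets per-key markers override the class-level `total=` setting:
+    #
+    #     from typing_extensions import NotRequired, TypedDict
+    #     from pydantic import TypeAdapter
+    #
+    #     class Movie(TypedDict):
+    #         title: str
+    #         year: NotRequired[int]
+    #
+    #     ta = TypeAdapter(Movie)
+    #     assert ta.validate_python({'title': 'Alien'}) == {'title': 'Alien'}
+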
+ def _namedtuple_schema(self, namedtuple_cls: Any, origin: Any) -> core_schema.CoreSchema:
+ """Generate schema for a NamedTuple."""
+ with self.model_type_stack.push(namedtuple_cls), self.defs.get_schema_or_ref(namedtuple_cls) as (
+ namedtuple_ref,
+ maybe_schema,
+ ):
+ if maybe_schema is not None:
+ return maybe_schema
+ typevars_map = get_standard_typevars_map(namedtuple_cls)
+ if origin is not None:
+ namedtuple_cls = origin
+
+ try:
+ annotations = _typing_extra.get_cls_type_hints(namedtuple_cls, ns_resolver=self._ns_resolver)
+ except NameError as e:
+ raise PydanticUndefinedAnnotation.from_name_error(e) from e
+ if not annotations:
+                # `annotations` is empty; this happens if namedtuple_cls was defined via collections.namedtuple(...)
+ annotations: dict[str, Any] = {k: Any for k in namedtuple_cls._fields}
+
+ if typevars_map:
+ annotations = {
+ field_name: replace_types(annotation, typevars_map)
+ for field_name, annotation in annotations.items()
+ }
+
+ arguments_schema = core_schema.arguments_schema(
+ [
+ self._generate_parameter_schema(
+ field_name,
+ annotation,
+ default=namedtuple_cls._field_defaults.get(field_name, Parameter.empty),
+ )
+ for field_name, annotation in annotations.items()
+ ],
+ metadata={'pydantic_js_prefer_positional_arguments': True},
+ )
+ return core_schema.call_schema(arguments_schema, namedtuple_cls, ref=namedtuple_ref)
+
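+    # Illustrative sketch (not part of the upstream module): because a NamedTuple is
+    # modeled as an arguments schema plus a call to the class, validation accepts a
+    # sequence of positional values and coerces each item to its annotated type:
+    #
+    #     from typing import NamedTuple
+    #     from pydantic import TypeAdapter
+    #
+    #     class Point(NamedTuple):
+    #         x: int
+    #         y: int
+    #
+    #     assert TypeAdapter(Point).validate_python((1, '2')) == Point(1, 2)
+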
+ def _generate_parameter_schema(
+ self,
+ name: str,
+ annotation: type[Any],
+ default: Any = Parameter.empty,
+ mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] | None = None,
+ ) -> core_schema.ArgumentsParameter:
+ """Prepare a ArgumentsParameter to represent a field in a namedtuple or function signature."""
+ FieldInfo = import_cached_field_info()
+
+ if default is Parameter.empty:
+ field = FieldInfo.from_annotation(annotation)
+ else:
+ field = FieldInfo.from_annotated_attribute(annotation, default)
+ assert field.annotation is not None, 'field.annotation should not be None when generating a schema'
+ with self.field_name_stack.push(name):
+ schema = self._apply_annotations(field.annotation, [field])
+
+ if not field.is_required():
+ schema = wrap_default(field, schema)
+
+ parameter_schema = core_schema.arguments_parameter(name, schema)
+ if mode is not None:
+ parameter_schema['mode'] = mode
+ if field.alias is not None:
+ parameter_schema['alias'] = field.alias
+ else:
+ alias_generator = self._config_wrapper.alias_generator
+ if isinstance(alias_generator, AliasGenerator) and alias_generator.alias is not None:
+ parameter_schema['alias'] = alias_generator.alias(name)
+ elif isinstance(alias_generator, Callable):
+ parameter_schema['alias'] = alias_generator(name)
+ return parameter_schema
+
+ def _tuple_schema(self, tuple_type: Any) -> core_schema.CoreSchema:
+ """Generate schema for a Tuple, e.g. `tuple[int, str]` or `tuple[int, ...]`."""
+ # TODO: do we really need to resolve type vars here?
+ typevars_map = get_standard_typevars_map(tuple_type)
+ params = self._get_args_resolving_forward_refs(tuple_type)
+
+ if typevars_map and params:
+ params = tuple(replace_types(param, typevars_map) for param in params)
+
+        # NOTE: subtle difference: `tuple[()]` gives `params=()`, whereas `typing.Tuple[()]` gives `params=((),)`.
+        # This is only true for Python < 3.11; on Python 3.11+, `typing.Tuple[()]` gives `params=()`.
+ if not params:
+ if tuple_type in TUPLE_TYPES:
+ return core_schema.tuple_schema([core_schema.any_schema()], variadic_item_index=0)
+ else:
+ # special case for `tuple[()]` which means `tuple[]` - an empty tuple
+ return core_schema.tuple_schema([])
+ elif params[-1] is Ellipsis:
+ if len(params) == 2:
+ return core_schema.tuple_schema([self.generate_schema(params[0])], variadic_item_index=0)
+ else:
+ # TODO: something like https://github.com/pydantic/pydantic/issues/5952
+ raise ValueError('Variable tuples can only have one type')
+ elif len(params) == 1 and params[0] == ():
+ # special case for `Tuple[()]` which means `Tuple[]` - an empty tuple
+ # NOTE: This conditional can be removed when we drop support for Python 3.10.
+ return core_schema.tuple_schema([])
+ else:
+ return core_schema.tuple_schema([self.generate_schema(param) for param in params])
+
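+    # Illustrative sketch (not part of the upstream module): the variadic branch above
+    # means `tuple[int, ...]` accepts any length, while `tuple[int, str]` is positional:
+    #
+    #     from pydantic import TypeAdapter
+    #
+    #     assert TypeAdapter(tuple[int, ...]).validate_python(['1', 2]) == (1, 2)
+    #     assert TypeAdapter(tuple[int, str]).validate_python([1, 'a']) == (1, 'a')
+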
+ def _type_schema(self) -> core_schema.CoreSchema:
+ return core_schema.custom_error_schema(
+ core_schema.is_instance_schema(type),
+ custom_error_type='is_type',
+ custom_error_message='Input should be a type',
+ )
+
+ def _zoneinfo_schema(self) -> core_schema.CoreSchema:
+ """Generate schema for a zone_info.ZoneInfo object"""
+ # we're def >=py3.9 if ZoneInfo was included in input
+ if sys.version_info < (3, 9):
+ assert False, 'Unreachable'
+
+ # import in this path is safe
+ from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
+
+ def validate_str_is_valid_iana_tz(value: Any, /) -> ZoneInfo:
+ if isinstance(value, ZoneInfo):
+ return value
+ try:
+ return ZoneInfo(value)
+ except (ZoneInfoNotFoundError, ValueError, TypeError):
+ raise PydanticCustomError('zoneinfo_str', 'invalid timezone: {value}', {'value': value})
+
+ metadata = {'pydantic_js_functions': [lambda _1, _2: {'type': 'string', 'format': 'zoneinfo'}]}
+ return core_schema.no_info_plain_validator_function(
+ validate_str_is_valid_iana_tz,
+ serialization=core_schema.to_string_ser_schema(),
+ metadata=metadata,
+ )
+
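+    # Illustrative sketch (not part of the upstream module): the plain validator above
+    # accepts either a ZoneInfo instance or an IANA key string:
+    #
+    #     from zoneinfo import ZoneInfo
+    #     from pydantic import TypeAdapter
+    #
+    #     tz = TypeAdapter(ZoneInfo).validate_python('Europe/Paris')
+    #     assert tz == ZoneInfo('Europe/Paris')
+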
+ def _union_is_subclass_schema(self, union_type: Any) -> core_schema.CoreSchema:
+ """Generate schema for `Type[Union[X, ...]]`."""
+ args = self._get_args_resolving_forward_refs(union_type, required=True)
+        return core_schema.union_schema([self.generate_schema(typing.Type[arg]) for arg in args])
+
+ def _subclass_schema(self, type_: Any) -> core_schema.CoreSchema:
+ """Generate schema for a Type, e.g. `Type[int]`."""
+ type_param = self._get_first_arg_or_any(type_)
+
+ # Assume `type[Annotated[<typ>, ...]]` is equivalent to `type[<typ>]`:
+ type_param = _typing_extra.annotated_type(type_param) or type_param
+
+ if _typing_extra.is_any(type_param):
+ return self._type_schema()
+ elif _typing_extra.is_type_alias_type(type_param):
+ return self.generate_schema(typing.Type[type_param.__value__])
+ elif isinstance(type_param, typing.TypeVar):
+ if type_param.__bound__:
+ if _typing_extra.origin_is_union(get_origin(type_param.__bound__)):
+ return self._union_is_subclass_schema(type_param.__bound__)
+ return core_schema.is_subclass_schema(type_param.__bound__)
+ elif type_param.__constraints__:
+ return core_schema.union_schema(
+ [self.generate_schema(typing.Type[c]) for c in type_param.__constraints__]
+ )
+ else:
+ return self._type_schema()
+ elif _typing_extra.origin_is_union(get_origin(type_param)):
+ return self._union_is_subclass_schema(type_param)
+ else:
+ if _typing_extra.is_self(type_param):
+ type_param = self._resolve_self_type(type_param)
+
+ if not inspect.isclass(type_param):
+ raise TypeError(f'Expected a class, got {type_param!r}')
+ return core_schema.is_subclass_schema(type_param)
+
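+    # Illustrative sketch (not part of the upstream module): `type[X]` validates that
+    # the input is a class object and a subclass of X:
+    #
+    #     from pydantic import TypeAdapter
+    #
+    #     assert TypeAdapter(type[int]).validate_python(bool) is bool  # bool subclasses int
+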
+ def _sequence_schema(self, items_type: Any) -> core_schema.CoreSchema:
+ """Generate schema for a Sequence, e.g. `Sequence[int]`."""
+ from ._serializers import serialize_sequence_via_list
+
+ item_type_schema = self.generate_schema(items_type)
+ list_schema = core_schema.list_schema(item_type_schema)
+
+ json_schema = smart_deepcopy(list_schema)
+ python_schema = core_schema.is_instance_schema(typing.Sequence, cls_repr='Sequence')
+ if not _typing_extra.is_any(items_type):
+ from ._validators import sequence_validator
+
+ python_schema = core_schema.chain_schema(
+ [python_schema, core_schema.no_info_wrap_validator_function(sequence_validator, list_schema)],
+ )
+
+ serialization = core_schema.wrap_serializer_function_ser_schema(
+ serialize_sequence_via_list, schema=item_type_schema, info_arg=True
+ )
+ return core_schema.json_or_python_schema(
+ json_schema=json_schema, python_schema=python_schema, serialization=serialization
+ )
+
+ def _iterable_schema(self, type_: Any) -> core_schema.GeneratorSchema:
+ """Generate a schema for an `Iterable`."""
+ item_type = self._get_first_arg_or_any(type_)
+
+ return core_schema.generator_schema(self.generate_schema(item_type))
+
+ def _pattern_schema(self, pattern_type: Any) -> core_schema.CoreSchema:
+ from . import _validators
+
+ metadata = {'pydantic_js_functions': [lambda _1, _2: {'type': 'string', 'format': 'regex'}]}
+ ser = core_schema.plain_serializer_function_ser_schema(
+ attrgetter('pattern'), when_used='json', return_schema=core_schema.str_schema()
+ )
+ if pattern_type is typing.Pattern or pattern_type is re.Pattern:
+ # bare type
+ return core_schema.no_info_plain_validator_function(
+ _validators.pattern_either_validator, serialization=ser, metadata=metadata
+ )
+
+ param = self._get_args_resolving_forward_refs(
+ pattern_type,
+ required=True,
+ )[0]
+ if param is str:
+ return core_schema.no_info_plain_validator_function(
+ _validators.pattern_str_validator, serialization=ser, metadata=metadata
+ )
+ elif param is bytes:
+ return core_schema.no_info_plain_validator_function(
+ _validators.pattern_bytes_validator, serialization=ser, metadata=metadata
+ )
+ else:
+ raise PydanticSchemaGenerationError(f'Unable to generate pydantic-core schema for {pattern_type!r}.')
+
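+    # Illustrative sketch (not part of the upstream module): the validators above
+    # compile strings/bytes into compiled patterns, and the JSON serializer emits
+    # the `.pattern` attribute as a plain string:
+    #
+    #     import re
+    #     from pydantic import TypeAdapter
+    #
+    #     p = TypeAdapter(re.Pattern[str]).validate_python('a+b')
+    #     assert isinstance(p, re.Pattern)
+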
+ def _hashable_schema(self) -> core_schema.CoreSchema:
+ return core_schema.custom_error_schema(
+ schema=core_schema.json_or_python_schema(
+ json_schema=core_schema.chain_schema(
+ [core_schema.any_schema(), core_schema.is_instance_schema(collections.abc.Hashable)]
+ ),
+ python_schema=core_schema.is_instance_schema(collections.abc.Hashable),
+ ),
+ custom_error_type='is_hashable',
+ custom_error_message='Input should be hashable',
+ )
+
+ def _dataclass_schema(
+ self, dataclass: type[StandardDataclass], origin: type[StandardDataclass] | None
+ ) -> core_schema.CoreSchema:
+ """Generate schema for a dataclass."""
+ with self.model_type_stack.push(dataclass), self.defs.get_schema_or_ref(dataclass) as (
+ dataclass_ref,
+ maybe_schema,
+ ):
+ if maybe_schema is not None:
+ return maybe_schema
+
+ typevars_map = get_standard_typevars_map(dataclass)
+ if origin is not None:
+ dataclass = origin
+
+ # if (plain) dataclass doesn't have config, we use the parent's config, hence a default of `None`
+ # (Pydantic dataclasses have an empty dict config by default).
+ # see https://github.com/pydantic/pydantic/issues/10917
+ config = getattr(dataclass, '__pydantic_config__', None)
+
+ from ..dataclasses import is_pydantic_dataclass
+
+ with self._ns_resolver.push(dataclass), self._config_wrapper_stack.push(config):
+ if is_pydantic_dataclass(dataclass):
+ fields = deepcopy(dataclass.__pydantic_fields__)
+ if typevars_map:
+ for field in fields.values():
+ field.apply_typevars_map(typevars_map, *self._types_namespace)
+ else:
+ fields = collect_dataclass_fields(
+ dataclass,
+ typevars_map=typevars_map,
+ )
+
+ if self._config_wrapper.extra == 'allow':
+ # disallow combination of init=False on a dataclass field and extra='allow' on a dataclass
+ for field_name, field in fields.items():
+ if field.init is False:
+ raise PydanticUserError(
+ f'Field {field_name} has `init=False` and dataclass has config setting `extra="allow"`. '
+ f'This combination is not allowed.',
+ code='dataclass-init-false-extra-allow',
+ )
+
+ decorators = dataclass.__dict__.get('__pydantic_decorators__') or DecoratorInfos.build(dataclass)
+ # Move kw_only=False args to the start of the list, as this is how vanilla dataclasses work.
+ # Note that when kw_only is missing or None, it is treated as equivalent to kw_only=True
+ args = sorted(
+ (self._generate_dc_field_schema(k, v, decorators) for k, v in fields.items()),
+ key=lambda a: a.get('kw_only') is not False,
+ )
+ has_post_init = hasattr(dataclass, '__post_init__')
+ has_slots = hasattr(dataclass, '__slots__')
+
+ args_schema = core_schema.dataclass_args_schema(
+ dataclass.__name__,
+ args,
+ computed_fields=[
+ self._computed_field_schema(d, decorators.field_serializers)
+ for d in decorators.computed_fields.values()
+ ],
+ collect_init_only=has_post_init,
+ )
+
+ inner_schema = apply_validators(args_schema, decorators.root_validators.values(), None)
+
+ model_validators = decorators.model_validators.values()
+ inner_schema = apply_model_validators(inner_schema, model_validators, 'inner')
+
+ core_config = self._config_wrapper.core_config(title=dataclass.__name__)
+
+ dc_schema = core_schema.dataclass_schema(
+ dataclass,
+ inner_schema,
+ generic_origin=origin,
+ post_init=has_post_init,
+ ref=dataclass_ref,
+ fields=[field.name for field in dataclasses.fields(dataclass)],
+ slots=has_slots,
+ config=core_config,
+ # we don't use a custom __setattr__ for dataclasses, so we must
+ # pass along the frozen config setting to the pydantic-core schema
+ frozen=self._config_wrapper_stack.tail.frozen,
+ )
+ schema = self._apply_model_serializers(dc_schema, decorators.model_serializers.values())
+ schema = apply_model_validators(schema, model_validators, 'outer')
+ self.defs.definitions[dataclass_ref] = schema
+ return core_schema.definition_reference_schema(dataclass_ref)
+
+ def _call_schema(self, function: ValidateCallSupportedTypes) -> core_schema.CallSchema:
+ """Generate schema for a Callable.
+
+ TODO support functional validators once we support them in Config
+ """
+ sig = signature(function)
+ globalns, localns = self._types_namespace
+ type_hints = _typing_extra.get_function_type_hints(function, globalns=globalns, localns=localns)
+
+ mode_lookup: dict[_ParameterKind, Literal['positional_only', 'positional_or_keyword', 'keyword_only']] = {
+ Parameter.POSITIONAL_ONLY: 'positional_only',
+ Parameter.POSITIONAL_OR_KEYWORD: 'positional_or_keyword',
+ Parameter.KEYWORD_ONLY: 'keyword_only',
+ }
+
+ arguments_list: list[core_schema.ArgumentsParameter] = []
+ var_args_schema: core_schema.CoreSchema | None = None
+ var_kwargs_schema: core_schema.CoreSchema | None = None
+ var_kwargs_mode: core_schema.VarKwargsMode | None = None
+
+ for name, p in sig.parameters.items():
+ if p.annotation is sig.empty:
+ annotation = typing.cast(Any, Any)
+ else:
+ annotation = type_hints[name]
+
+ parameter_mode = mode_lookup.get(p.kind)
+ if parameter_mode is not None:
+ arg_schema = self._generate_parameter_schema(name, annotation, p.default, parameter_mode)
+ arguments_list.append(arg_schema)
+ elif p.kind == Parameter.VAR_POSITIONAL:
+ var_args_schema = self.generate_schema(annotation)
+ else:
+ assert p.kind == Parameter.VAR_KEYWORD, p.kind
+
+ unpack_type = _typing_extra.unpack_type(annotation)
+ if unpack_type is not None:
+ if not is_typeddict(unpack_type):
+ raise PydanticUserError(
+ f'Expected a `TypedDict` class, got {unpack_type.__name__!r}', code='unpack-typed-dict'
+ )
+ non_pos_only_param_names = {
+ name for name, p in sig.parameters.items() if p.kind != Parameter.POSITIONAL_ONLY
+ }
+ overlapping_params = non_pos_only_param_names.intersection(unpack_type.__annotations__)
+ if overlapping_params:
+ raise PydanticUserError(
+ f'Typed dictionary {unpack_type.__name__!r} overlaps with parameter'
+ f"{'s' if len(overlapping_params) >= 2 else ''} "
+ f"{', '.join(repr(p) for p in sorted(overlapping_params))}",
+ code='overlapping-unpack-typed-dict',
+ )
+
+ var_kwargs_mode = 'unpacked-typed-dict'
+ var_kwargs_schema = self._typed_dict_schema(unpack_type, None)
+ else:
+ var_kwargs_mode = 'uniform'
+ var_kwargs_schema = self.generate_schema(annotation)
+
+ return_schema: core_schema.CoreSchema | None = None
+ config_wrapper = self._config_wrapper
+ if config_wrapper.validate_return:
+ return_hint = sig.return_annotation
+ if return_hint is not sig.empty:
+ return_schema = self.generate_schema(return_hint)
+
+ return core_schema.call_schema(
+ core_schema.arguments_schema(
+ arguments_list,
+ var_args_schema=var_args_schema,
+ var_kwargs_mode=var_kwargs_mode,
+ var_kwargs_schema=var_kwargs_schema,
+ populate_by_name=config_wrapper.populate_by_name,
+ ),
+ function,
+ return_schema=return_schema,
+ )
+
+ def _unsubstituted_typevar_schema(self, typevar: typing.TypeVar) -> core_schema.CoreSchema:
+ assert isinstance(typevar, typing.TypeVar)
+
+ bound = typevar.__bound__
+ constraints = typevar.__constraints__
+
+ try:
+ typevar_has_default = typevar.has_default() # type: ignore
+ except AttributeError:
+ # could still have a default if it's an old version of typing_extensions.TypeVar
+ typevar_has_default = getattr(typevar, '__default__', None) is not None
+
+ if (bound is not None) + (len(constraints) != 0) + typevar_has_default > 1:
+ raise NotImplementedError(
+ 'Pydantic does not support mixing more than one of TypeVar bounds, constraints and defaults'
+ )
+
+ if typevar_has_default:
+ return self.generate_schema(typevar.__default__) # type: ignore
+ elif constraints:
+ return self._union_schema(typing.Union[constraints]) # type: ignore
+ elif bound:
+ schema = self.generate_schema(bound)
+ schema['serialization'] = core_schema.wrap_serializer_function_ser_schema(
+ lambda x, h: h(x), schema=core_schema.any_schema()
+ )
+ return schema
+ else:
+ return core_schema.any_schema()
+
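+    # Illustrative sketch (not part of the upstream module): when a generic model is
+    # used unparametrized, the branch above falls back to the TypeVar's bound:
+    #
+    #     from typing import Generic, TypeVar
+    #     from pydantic import BaseModel
+    #
+    #     T = TypeVar('T', bound=int)
+    #
+    #     class Box(BaseModel, Generic[T]):
+    #         item: T
+    #
+    #     assert Box(item='3').item == 3
+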
+ def _computed_field_schema(
+ self,
+ d: Decorator[ComputedFieldInfo],
+ field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]],
+ ) -> core_schema.ComputedField:
+ try:
+ # Do not pass in globals as the function could be defined in a different module.
+ # Instead, let `get_function_return_type` infer the globals to use, but still pass
+ # in locals that may contain a parent/rebuild namespace:
+ return_type = _decorators.get_function_return_type(
+ d.func, d.info.return_type, localns=self._types_namespace.locals
+ )
+ except NameError as e:
+ raise PydanticUndefinedAnnotation.from_name_error(e) from e
+ if return_type is PydanticUndefined:
+ raise PydanticUserError(
+ 'Computed field is missing return type annotation or specifying `return_type`'
+ ' to the `@computed_field` decorator (e.g. `@computed_field(return_type=int|str)`)',
+ code='model-field-missing-annotation',
+ )
+
+ return_type = replace_types(return_type, self._typevars_map)
+ # Create a new ComputedFieldInfo so that different type parametrizations of the same
+ # generic model's computed field can have different return types.
+ d.info = dataclasses.replace(d.info, return_type=return_type)
+ return_type_schema = self.generate_schema(return_type)
+        # Apply serializers to the computed field if any exist
+ return_type_schema = self._apply_field_serializers(
+ return_type_schema,
+ filter_field_decorator_info_by_field(field_serializers.values(), d.cls_var_name),
+ )
+
+ alias_generator = self._config_wrapper.alias_generator
+ if alias_generator is not None:
+ self._apply_alias_generator_to_computed_field_info(
+ alias_generator=alias_generator, computed_field_info=d.info, computed_field_name=d.cls_var_name
+ )
+ self._apply_field_title_generator_to_field_info(self._config_wrapper, d.info, d.cls_var_name)
+
+ pydantic_js_updates, pydantic_js_extra = _extract_json_schema_info_from_field_info(d.info)
+ core_metadata: dict[str, Any] = {}
+ update_core_metadata(
+ core_metadata,
+ pydantic_js_updates={'readOnly': True, **(pydantic_js_updates if pydantic_js_updates else {})},
+ pydantic_js_extra=pydantic_js_extra,
+ )
+ return core_schema.computed_field(
+ d.cls_var_name, return_schema=return_type_schema, alias=d.info.alias, metadata=core_metadata
+ )
+
+ def _annotated_schema(self, annotated_type: Any) -> core_schema.CoreSchema:
+ """Generate schema for an Annotated type, e.g. `Annotated[int, Field(...)]` or `Annotated[int, Gt(0)]`."""
+ FieldInfo = import_cached_field_info()
+
+ source_type, *annotations = self._get_args_resolving_forward_refs(
+ annotated_type,
+ required=True,
+ )
+ schema = self._apply_annotations(source_type, annotations)
+ # put the default validator last so that TypeAdapter.get_default_value() works
+ # even if there are function validators involved
+ for annotation in annotations:
+ if isinstance(annotation, FieldInfo):
+ schema = wrap_default(annotation, schema)
+ return schema
+
+ def _get_prepare_pydantic_annotations_for_known_type(
+ self, obj: Any, annotations: tuple[Any, ...]
+ ) -> tuple[Any, list[Any]] | None:
+ from ._std_types_schema import (
+ deque_schema_prepare_pydantic_annotations,
+ mapping_like_prepare_pydantic_annotations,
+ path_schema_prepare_pydantic_annotations,
+ )
+
+ # Check for hashability
+ try:
+ hash(obj)
+ except TypeError:
+ # obj is definitely not a known type if this fails
+ return None
+
+ # TODO: I'd rather we didn't handle the generic nature in the annotations prep, but the same way we do other
+ # generic types like list[str] via _match_generic_type, but I'm not sure if we can do that because this is
+ # not always called from match_type, but sometimes from _apply_annotations
+ obj_origin = get_origin(obj) or obj
+
+ if obj_origin in PATH_TYPES:
+ return path_schema_prepare_pydantic_annotations(obj, annotations)
+ elif obj_origin in DEQUE_TYPES:
+ return deque_schema_prepare_pydantic_annotations(obj, annotations)
+ elif obj_origin in MAPPING_TYPES:
+ return mapping_like_prepare_pydantic_annotations(obj, annotations)
+ else:
+ return None
+
+ def _apply_annotations(
+ self,
+ source_type: Any,
+ annotations: list[Any],
+ transform_inner_schema: Callable[[CoreSchema], CoreSchema] = lambda x: x,
+ ) -> CoreSchema:
+ """Apply arguments from `Annotated` or from `FieldInfo` to a schema.
+
+ This gets called by `GenerateSchema._annotated_schema` but differs from it in that it does
+ not expect `source_type` to be an `Annotated` object, it expects it to be the first argument of that
+ (in other words, `GenerateSchema._annotated_schema` just unpacks `Annotated`, this process it).
+ """
+ annotations = list(_known_annotated_metadata.expand_grouped_metadata(annotations))
+ res = self._get_prepare_pydantic_annotations_for_known_type(source_type, tuple(annotations))
+ if res is not None:
+ source_type, annotations = res
+
+ pydantic_js_annotation_functions: list[GetJsonSchemaFunction] = []
+
+ def inner_handler(obj: Any) -> CoreSchema:
+ from_property = self._generate_schema_from_property(obj, source_type)
+ if from_property is None:
+ schema = self._generate_schema_inner(obj)
+ else:
+ schema = from_property
+ metadata_js_function = _extract_get_pydantic_json_schema(obj, schema)
+ if metadata_js_function is not None:
+ metadata_schema = resolve_original_schema(schema, self.defs.definitions)
+ if metadata_schema is not None:
+ self._add_js_function(metadata_schema, metadata_js_function)
+ return transform_inner_schema(schema)
+
+ get_inner_schema = CallbackGetCoreSchemaHandler(inner_handler, self)
+
+ for annotation in annotations:
+ if annotation is None:
+ continue
+ get_inner_schema = self._get_wrapped_inner_schema(
+ get_inner_schema, annotation, pydantic_js_annotation_functions
+ )
+
+ schema = get_inner_schema(source_type)
+ if pydantic_js_annotation_functions:
+ core_metadata = schema.setdefault('metadata', {})
+ update_core_metadata(core_metadata, pydantic_js_annotation_functions=pydantic_js_annotation_functions)
+ return _add_custom_serialization_from_json_encoders(self._config_wrapper.json_encoders, source_type, schema)
+
+ def _apply_single_annotation(self, schema: core_schema.CoreSchema, metadata: Any) -> core_schema.CoreSchema:
+ FieldInfo = import_cached_field_info()
+
+ if isinstance(metadata, FieldInfo):
+ for field_metadata in metadata.metadata:
+ schema = self._apply_single_annotation(schema, field_metadata)
+
+ if metadata.discriminator is not None:
+ schema = self._apply_discriminator_to_union(schema, metadata.discriminator)
+ return schema
+
+ if schema['type'] == 'nullable':
+ # for nullable schemas, metadata is automatically applied to the inner schema
+ inner = schema.get('schema', core_schema.any_schema())
+ inner = self._apply_single_annotation(inner, metadata)
+ if inner:
+ schema['schema'] = inner
+ return schema
+
+ original_schema = schema
+ ref = schema.get('ref', None)
+ if ref is not None:
+ schema = schema.copy()
+ new_ref = ref + f'_{repr(metadata)}'
+ if new_ref in self.defs.definitions:
+ return self.defs.definitions[new_ref]
+ schema['ref'] = new_ref # type: ignore
+ elif schema['type'] == 'definition-ref':
+ ref = schema['schema_ref']
+ if ref in self.defs.definitions:
+ schema = self.defs.definitions[ref].copy()
+ new_ref = ref + f'_{repr(metadata)}'
+ if new_ref in self.defs.definitions:
+ return self.defs.definitions[new_ref]
+ schema['ref'] = new_ref # type: ignore
+
+ maybe_updated_schema = _known_annotated_metadata.apply_known_metadata(metadata, schema.copy())
+
+ if maybe_updated_schema is not None:
+ return maybe_updated_schema
+ return original_schema
+
+ def _apply_single_annotation_json_schema(
+ self, schema: core_schema.CoreSchema, metadata: Any
+ ) -> core_schema.CoreSchema:
+ FieldInfo = import_cached_field_info()
+
+ if isinstance(metadata, FieldInfo):
+ for field_metadata in metadata.metadata:
+ schema = self._apply_single_annotation_json_schema(schema, field_metadata)
+
+ pydantic_js_updates, pydantic_js_extra = _extract_json_schema_info_from_field_info(metadata)
+ core_metadata = schema.setdefault('metadata', {})
+ update_core_metadata(
+ core_metadata, pydantic_js_updates=pydantic_js_updates, pydantic_js_extra=pydantic_js_extra
+ )
+ return schema
+
+ def _get_wrapped_inner_schema(
+ self,
+ get_inner_schema: GetCoreSchemaHandler,
+ annotation: Any,
+ pydantic_js_annotation_functions: list[GetJsonSchemaFunction],
+ ) -> CallbackGetCoreSchemaHandler:
+ metadata_get_schema: GetCoreSchemaFunction = getattr(annotation, '__get_pydantic_core_schema__', None) or (
+ lambda source, handler: handler(source)
+ )
+
+ def new_handler(source: Any) -> core_schema.CoreSchema:
+ schema = metadata_get_schema(source, get_inner_schema)
+ schema = self._apply_single_annotation(schema, annotation)
+ schema = self._apply_single_annotation_json_schema(schema, annotation)
+
+ metadata_js_function = _extract_get_pydantic_json_schema(annotation, schema)
+ if metadata_js_function is not None:
+ pydantic_js_annotation_functions.append(metadata_js_function)
+ return schema
+
+ return CallbackGetCoreSchemaHandler(new_handler, self)
+
+ def _apply_field_serializers(
+ self,
+ schema: core_schema.CoreSchema,
+ serializers: list[Decorator[FieldSerializerDecoratorInfo]],
+ ) -> core_schema.CoreSchema:
+ """Apply field serializers to a schema."""
+ if serializers:
+ schema = copy(schema)
+ if schema['type'] == 'definitions':
+ inner_schema = schema['schema']
+ schema['schema'] = self._apply_field_serializers(inner_schema, serializers)
+ return schema
+ else:
+ ref = typing.cast('str|None', schema.get('ref', None))
+ if ref is not None:
+ self.defs.definitions[ref] = schema
+ schema = core_schema.definition_reference_schema(ref)
+
+ # use the last serializer to make it easy to override a serializer set on a parent model
+ serializer = serializers[-1]
+ is_field_serializer, info_arg = inspect_field_serializer(serializer.func, serializer.info.mode)
+
+ try:
+ # Do not pass in globals as the function could be defined in a different module.
+ # Instead, let `get_function_return_type` infer the globals to use, but still pass
+ # in locals that may contain a parent/rebuild namespace:
+ return_type = _decorators.get_function_return_type(
+ serializer.func, serializer.info.return_type, localns=self._types_namespace.locals
+ )
+ except NameError as e:
+ raise PydanticUndefinedAnnotation.from_name_error(e) from e
+
+ if return_type is PydanticUndefined:
+ return_schema = None
+ else:
+ return_schema = self.generate_schema(return_type)
+
+ if serializer.info.mode == 'wrap':
+ schema['serialization'] = core_schema.wrap_serializer_function_ser_schema(
+ serializer.func,
+ is_field_serializer=is_field_serializer,
+ info_arg=info_arg,
+ return_schema=return_schema,
+ when_used=serializer.info.when_used,
+ )
+ else:
+ assert serializer.info.mode == 'plain'
+ schema['serialization'] = core_schema.plain_serializer_function_ser_schema(
+ serializer.func,
+ is_field_serializer=is_field_serializer,
+ info_arg=info_arg,
+ return_schema=return_schema,
+ when_used=serializer.info.when_used,
+ )
+ return schema
+
+ def _apply_model_serializers(
+ self, schema: core_schema.CoreSchema, serializers: Iterable[Decorator[ModelSerializerDecoratorInfo]]
+ ) -> core_schema.CoreSchema:
+ """Apply model serializers to a schema."""
+ ref: str | None = schema.pop('ref', None) # type: ignore
+ if serializers:
+ serializer = list(serializers)[-1]
+ info_arg = inspect_model_serializer(serializer.func, serializer.info.mode)
+
+ try:
+ # Do not pass in globals as the function could be defined in a different module.
+ # Instead, let `get_function_return_type` infer the globals to use, but still pass
+ # in locals that may contain a parent/rebuild namespace:
+ return_type = _decorators.get_function_return_type(
+ serializer.func, serializer.info.return_type, localns=self._types_namespace.locals
+ )
+ except NameError as e:
+ raise PydanticUndefinedAnnotation.from_name_error(e) from e
+ if return_type is PydanticUndefined:
+ return_schema = None
+ else:
+ return_schema = self.generate_schema(return_type)
+
+ if serializer.info.mode == 'wrap':
+ ser_schema: core_schema.SerSchema = core_schema.wrap_serializer_function_ser_schema(
+ serializer.func,
+ info_arg=info_arg,
+ return_schema=return_schema,
+ when_used=serializer.info.when_used,
+ )
+ else:
+ # plain
+ ser_schema = core_schema.plain_serializer_function_ser_schema(
+ serializer.func,
+ info_arg=info_arg,
+ return_schema=return_schema,
+ when_used=serializer.info.when_used,
+ )
+ schema['serialization'] = ser_schema
+ if ref:
+ schema['ref'] = ref # type: ignore
+ return schema
+
+
+_VALIDATOR_F_MATCH: Mapping[
+ tuple[FieldValidatorModes, Literal['no-info', 'with-info']],
+ Callable[[Callable[..., Any], core_schema.CoreSchema, str | None], core_schema.CoreSchema],
+] = {
+ ('before', 'no-info'): lambda f, schema, _: core_schema.no_info_before_validator_function(f, schema),
+ ('after', 'no-info'): lambda f, schema, _: core_schema.no_info_after_validator_function(f, schema),
+ ('plain', 'no-info'): lambda f, _1, _2: core_schema.no_info_plain_validator_function(f),
+ ('wrap', 'no-info'): lambda f, schema, _: core_schema.no_info_wrap_validator_function(f, schema),
+ ('before', 'with-info'): lambda f, schema, field_name: core_schema.with_info_before_validator_function(
+ f, schema, field_name=field_name
+ ),
+ ('after', 'with-info'): lambda f, schema, field_name: core_schema.with_info_after_validator_function(
+ f, schema, field_name=field_name
+ ),
+ ('plain', 'with-info'): lambda f, _, field_name: core_schema.with_info_plain_validator_function(
+ f, field_name=field_name
+ ),
+ ('wrap', 'with-info'): lambda f, schema, field_name: core_schema.with_info_wrap_validator_function(
+ f, schema, field_name=field_name
+ ),
+}
+
+
+# TODO V3: this function is only used for deprecated decorators. It should
+# be removed once we drop support for those.
+def apply_validators(
+ schema: core_schema.CoreSchema,
+ validators: Iterable[Decorator[RootValidatorDecoratorInfo]]
+ | Iterable[Decorator[ValidatorDecoratorInfo]]
+ | Iterable[Decorator[FieldValidatorDecoratorInfo]],
+ field_name: str | None,
+) -> core_schema.CoreSchema:
+ """Apply validators to a schema.
+
+ Args:
+ schema: The schema to apply validators on.
+ validators: An iterable of validators.
+ field_name: The name of the field if validators are being applied to a model field.
+
+ Returns:
+ The updated schema.
+ """
+ for validator in validators:
+ info_arg = inspect_validator(validator.func, validator.info.mode)
+ val_type = 'with-info' if info_arg else 'no-info'
+
+ schema = _VALIDATOR_F_MATCH[(validator.info.mode, val_type)](validator.func, schema, field_name)
+ return schema
+
+
+def _validators_require_validate_default(validators: Iterable[Decorator[ValidatorDecoratorInfo]]) -> bool:
+ """In v1, if any of the validators for a field had `always=True`, the default value would be validated.
+
+    This serves as an auxiliary function for re-implementing that logic, by looping over a provided
+    collection of (v1-style) ValidatorDecoratorInfos and checking if any of them have `always=True`.
+
+ We should be able to drop this function and the associated logic calling it once we drop support
+ for v1-style validator decorators. (Or we can extend it and keep it if we add something equivalent
+ to the v1-validator `always` kwarg to `field_validator`.)
+ """
+ for validator in validators:
+ if validator.info.always:
+ return True
+ return False
+
+
+def apply_model_validators(
+ schema: core_schema.CoreSchema,
+ validators: Iterable[Decorator[ModelValidatorDecoratorInfo]],
+ mode: Literal['inner', 'outer', 'all'],
+) -> core_schema.CoreSchema:
+ """Apply model validators to a schema.
+
+ If mode == 'inner', only "before" validators are applied
+ If mode == 'outer', validators other than "before" are applied
+ If mode == 'all', all validators are applied
+
+ Args:
+ schema: The schema to apply validators on.
+ validators: An iterable of validators.
+ mode: The validator mode.
+
+ Returns:
+ The updated schema.
+ """
+ ref: str | None = schema.pop('ref', None) # type: ignore
+ for validator in validators:
+ if mode == 'inner' and validator.info.mode != 'before':
+ continue
+ if mode == 'outer' and validator.info.mode == 'before':
+ continue
+ info_arg = inspect_validator(validator.func, validator.info.mode)
+ if validator.info.mode == 'wrap':
+ if info_arg:
+ schema = core_schema.with_info_wrap_validator_function(function=validator.func, schema=schema)
+ else:
+ schema = core_schema.no_info_wrap_validator_function(function=validator.func, schema=schema)
+ elif validator.info.mode == 'before':
+ if info_arg:
+ schema = core_schema.with_info_before_validator_function(function=validator.func, schema=schema)
+ else:
+ schema = core_schema.no_info_before_validator_function(function=validator.func, schema=schema)
+ else:
+ assert validator.info.mode == 'after'
+ if info_arg:
+ schema = core_schema.with_info_after_validator_function(function=validator.func, schema=schema)
+ else:
+ schema = core_schema.no_info_after_validator_function(function=validator.func, schema=schema)
+ if ref:
+ schema['ref'] = ref # type: ignore
+ return schema
+
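+# Illustrative sketch (not part of the upstream module): the 'inner'/'outer' split
+# above mirrors how `@model_validator(mode='before')` runs before field validation,
+# while mode='after' wraps the already-validated model:
+#
+#     from pydantic import BaseModel, model_validator
+#
+#     class Model(BaseModel):
+#         x: int
+#
+#         @model_validator(mode='before')
+#         @classmethod
+#         def unwrap(cls, data):
+#             return data.get('payload', data)
+#
+#     assert Model(payload={'x': 1}).x == 1
+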
+
+def wrap_default(field_info: FieldInfo, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
+ """Wrap schema with default schema if default value or `default_factory` are available.
+
+ Args:
+ field_info: The field info object.
+ schema: The schema to apply default on.
+
+ Returns:
+ Updated schema by default value or `default_factory`.
+ """
+ if field_info.default_factory:
+ return core_schema.with_default_schema(
+ schema,
+ default_factory=field_info.default_factory,
+ default_factory_takes_data=takes_validated_data_argument(field_info.default_factory),
+ validate_default=field_info.validate_default,
+ )
+ elif field_info.default is not PydanticUndefined:
+ return core_schema.with_default_schema(
+ schema, default=field_info.default, validate_default=field_info.validate_default
+ )
+ else:
+ return schema
+
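+# Illustrative sketch (not part of the upstream module): `default_factory` takes
+# precedence over `default` in the branching above, and `validate_default=True`
+# pushes the default value through the field's own schema:
+#
+#     from pydantic import BaseModel, Field
+#
+#     class Model(BaseModel):
+#         n: int = Field(default='3', validate_default=True)
+#
+#     assert Model().n == 3
+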
+
+def _extract_get_pydantic_json_schema(tp: Any, schema: CoreSchema) -> GetJsonSchemaFunction | None:
+ """Extract `__get_pydantic_json_schema__` from a type, handling the deprecated `__modify_schema__`."""
+ js_modify_function = getattr(tp, '__get_pydantic_json_schema__', None)
+
+ if hasattr(tp, '__modify_schema__'):
+ BaseModel = import_cached_base_model()
+
+ has_custom_v2_modify_js_func = (
+ js_modify_function is not None
+ and BaseModel.__get_pydantic_json_schema__.__func__ # type: ignore
+ not in (js_modify_function, getattr(js_modify_function, '__func__', None))
+ )
+
+ if not has_custom_v2_modify_js_func:
+ cls_name = getattr(tp, '__name__', None)
+ raise PydanticUserError(
+ f'The `__modify_schema__` method is not supported in Pydantic v2. '
+ f'Use `__get_pydantic_json_schema__` instead{f" in class `{cls_name}`" if cls_name else ""}.',
+ code='custom-json-schema',
+ )
+
+    # handle GenericAliases, but ignore Annotated, which "lies" about its origin (in this case it would be `int`)
+ if hasattr(tp, '__origin__') and not _typing_extra.is_annotated(tp):
+ return _extract_get_pydantic_json_schema(tp.__origin__, schema)
+
+ if js_modify_function is None:
+ return None
+
+ return js_modify_function
+
+
+class _CommonField(TypedDict):
+ schema: core_schema.CoreSchema
+ validation_alias: str | list[str | int] | list[list[str | int]] | None
+ serialization_alias: str | None
+ serialization_exclude: bool | None
+ frozen: bool | None
+ metadata: dict[str, Any]
+
+
+def _common_field(
+ schema: core_schema.CoreSchema,
+ *,
+ validation_alias: str | list[str | int] | list[list[str | int]] | None = None,
+ serialization_alias: str | None = None,
+ serialization_exclude: bool | None = None,
+ frozen: bool | None = None,
+ metadata: Any = None,
+) -> _CommonField:
+ return {
+ 'schema': schema,
+ 'validation_alias': validation_alias,
+ 'serialization_alias': serialization_alias,
+ 'serialization_exclude': serialization_exclude,
+ 'frozen': frozen,
+ 'metadata': metadata,
+ }
+
+
+class _Definitions:
+ """Keeps track of references and definitions."""
+
+ def __init__(self) -> None:
+ self.seen: set[str] = set()
+ self.definitions: dict[str, core_schema.CoreSchema] = {}
+
+ @contextmanager
+ def get_schema_or_ref(self, tp: Any) -> Iterator[tuple[str, None] | tuple[str, CoreSchema]]:
+ """Get a definition for `tp` if one exists.
+
+ If a definition exists, a tuple of `(ref_string, CoreSchema)` is returned.
+ If no definition exists yet, a tuple of `(ref_string, None)` is returned.
+
+ Note that the returned `CoreSchema` will always be a `DefinitionReferenceSchema`,
+ not the actual definition itself.
+
+ This should be called for any type that can be identified by reference.
+ This includes any recursive types.
+
+ At present the following types can be named/recursive:
+
+ - BaseModel
+ - Dataclasses
+ - TypedDict
+ - TypeAliasType
+ """
+ ref = get_type_ref(tp)
+ # return the reference if we're either (1) in a cycle or (2) it was already defined
+ if ref in self.seen or ref in self.definitions:
+ yield (ref, core_schema.definition_reference_schema(ref))
+ else:
+ self.seen.add(ref)
+ try:
+ yield (ref, None)
+ finally:
+ self.seen.discard(ref)
+
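+# Illustrative sketch (not part of the upstream module): the `seen` set above is what
+# lets a recursive model reference itself while its schema is still being built; the
+# inner occurrence gets a definition-reference instead of recursing forever:
+#
+#     from pydantic import BaseModel
+#
+#     class Node(BaseModel):
+#         children: 'list[Node]' = []
+#
+#     Node(children=[Node()])
+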
+
+def resolve_original_schema(schema: CoreSchema, definitions: dict[str, CoreSchema]) -> CoreSchema | None:
+ if schema['type'] == 'definition-ref':
+ return definitions.get(schema['schema_ref'], None)
+ elif schema['type'] == 'definitions':
+ return schema['schema']
+ else:
+ return schema
+
+
+class _FieldNameStack:
+ __slots__ = ('_stack',)
+
+ def __init__(self) -> None:
+ self._stack: list[str] = []
+
+ @contextmanager
+ def push(self, field_name: str) -> Iterator[None]:
+ self._stack.append(field_name)
+ yield
+ self._stack.pop()
+
+ def get(self) -> str | None:
+ if self._stack:
+ return self._stack[-1]
+ else:
+ return None
+
+
+class _ModelTypeStack:
+ __slots__ = ('_stack',)
+
+ def __init__(self) -> None:
+ self._stack: list[type] = []
+
+ @contextmanager
+ def push(self, type_obj: type) -> Iterator[None]:
+ self._stack.append(type_obj)
+ yield
+ self._stack.pop()
+
+ def get(self) -> type | None:
+ if self._stack:
+ return self._stack[-1]
+ else:
+ return None
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_generics.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_generics.py
new file mode 100644
index 00000000..8a9de221
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_generics.py
@@ -0,0 +1,536 @@
+from __future__ import annotations
+
+import sys
+import types
+import typing
+from collections import ChainMap
+from contextlib import contextmanager
+from contextvars import ContextVar
+from types import prepare_class
+from typing import TYPE_CHECKING, Any, Iterator, Mapping, MutableMapping, Tuple, TypeVar
+from weakref import WeakValueDictionary
+
+import typing_extensions
+
+from . import _typing_extra
+from ._core_utils import get_type_ref
+from ._forward_ref import PydanticRecursiveRef
+from ._utils import all_identical, is_model_class
+
+if sys.version_info >= (3, 10):
+ from typing import _UnionGenericAlias # type: ignore[attr-defined]
+
+if TYPE_CHECKING:
+ from ..main import BaseModel
+
+GenericTypesCacheKey = Tuple[Any, Any, Tuple[Any, ...]]
+
+# Note: We want to remove LimitedDict, but to do this, we'd need to improve the handling of generics caching.
+# Right now, to handle recursive generics, some types must remain cached for brief periods without references.
+# By chaining the WeakValueDictionary with a LimitedDict, we have a way to retain caching for all types with references,
+# while also retaining a limited number of types even without references. This is generally enough to build
+# specific recursive generic models without losing required items out of the cache.
+
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+_LIMITED_DICT_SIZE = 100
+if TYPE_CHECKING:
+
+ class LimitedDict(dict, MutableMapping[KT, VT]):
+ def __init__(self, size_limit: int = _LIMITED_DICT_SIZE): ...
+
+else:
+
+ class LimitedDict(dict):
+ """Limit the size/length of a dict used for caching to avoid unlimited increase in memory usage.
+
+ Since the dict is ordered, and we always remove elements from the beginning, this is effectively a FIFO cache.
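+
+        Example (an illustrative sketch of the eviction behavior):
+            ```python
+            d = LimitedDict(size_limit=10)
+            for i in range(11):
+                d[i] = i
+            # the 11th insertion trims the oldest keys plus ~10% headroom:
+            # excess = 11 - 10 + 10 // 10 == 2, so keys 0 and 1 are evicted
+            assert list(d) == list(range(2, 11))
+            ```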
+ """
+
+ def __init__(self, size_limit: int = _LIMITED_DICT_SIZE):
+ self.size_limit = size_limit
+ super().__init__()
+
+ def __setitem__(self, key: Any, value: Any, /) -> None:
+ super().__setitem__(key, value)
+ if len(self) > self.size_limit:
+ excess = len(self) - self.size_limit + self.size_limit // 10
+ to_remove = list(self.keys())[:excess]
+ for k in to_remove:
+ del self[k]
+
+
+# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected
+# once they are no longer referenced by the caller.
+if sys.version_info >= (3, 9): # Typing for weak dictionaries available at 3.9
+ GenericTypesCache = WeakValueDictionary[GenericTypesCacheKey, 'type[BaseModel]']
+else:
+ GenericTypesCache = WeakValueDictionary
+
+if TYPE_CHECKING:
+
+ class DeepChainMap(ChainMap[KT, VT]): # type: ignore
+ ...
+
+else:
+
+ class DeepChainMap(ChainMap):
+ """Variant of ChainMap that allows direct updates to inner scopes.
+
+ Taken from https://docs.python.org/3/library/collections.html#collections.ChainMap,
+ with some light modifications for this use case.
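+
+        Example (illustrative):
+            ```python
+            m = DeepChainMap({'a': 1}, {'b': 2})
+            m['b'] = 3  # unlike ChainMap, this writes to *every* inner mapping
+            assert m.maps == [{'a': 1, 'b': 3}, {'b': 3}]
+            ```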
+ """
+
+ def clear(self) -> None:
+ for mapping in self.maps:
+ mapping.clear()
+
+ def __setitem__(self, key: KT, value: VT) -> None:
+ for mapping in self.maps:
+ mapping[key] = value
+
+ def __delitem__(self, key: KT) -> None:
+ hit = False
+ for mapping in self.maps:
+ if key in mapping:
+ del mapping[key]
+ hit = True
+ if not hit:
+ raise KeyError(key)
+
+
+# Despite the fact that LimitedDict _seems_ no longer necessary, I'm very nervous to actually remove it
+# and discover later on that we need to re-add all this infrastructure...
+# _GENERIC_TYPES_CACHE = DeepChainMap(GenericTypesCache(), LimitedDict())
+
+_GENERIC_TYPES_CACHE = GenericTypesCache()
+
+
+class PydanticGenericMetadata(typing_extensions.TypedDict):
+ origin: type[BaseModel] | None # analogous to typing._GenericAlias.__origin__
+ args: tuple[Any, ...] # analogous to typing._GenericAlias.__args__
+ parameters: tuple[TypeVar, ...] # analogous to typing.Generic.__parameters__
+
+
+def create_generic_submodel(
+ model_name: str, origin: type[BaseModel], args: tuple[Any, ...], params: tuple[Any, ...]
+) -> type[BaseModel]:
+ """Dynamically create a submodel of a provided (generic) BaseModel.
+
+ This is used when producing concrete parametrizations of generic models. This function
+ only *creates* the new subclass; the schema/validators/serialization must be updated to
+ reflect a concrete parametrization elsewhere.
+
+ Args:
+ model_name: The name of the newly created model.
+ origin: The base class for the new model to inherit from.
+ args: A tuple of generic metadata arguments.
+ params: A tuple of generic metadata parameters.
+
+ Returns:
+ The created submodel.
+ """
+ namespace: dict[str, Any] = {'__module__': origin.__module__}
+ bases = (origin,)
+ meta, ns, kwds = prepare_class(model_name, bases)
+ namespace.update(ns)
+ created_model = meta(
+ model_name,
+ bases,
+ namespace,
+ __pydantic_generic_metadata__={
+ 'origin': origin,
+ 'args': args,
+ 'parameters': params,
+ },
+ __pydantic_reset_parent_namespace__=False,
+ **kwds,
+ )
+
+ model_module, called_globally = _get_caller_frame_info(depth=3)
+ if called_globally: # create global reference and therefore allow pickling
+ object_by_reference = None
+ reference_name = model_name
+ reference_module_globals = sys.modules[created_model.__module__].__dict__
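+        # Bind the new class to a module-level name so `pickle` can find it by reference;
+        # if the name is already taken by a different object, keep appending '_' until a
+        # free slot is found (setdefault returns the existing value on a name collision).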
+ while object_by_reference is not created_model:
+ object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
+ reference_name += '_'
+
+ return created_model
+
+
+def _get_caller_frame_info(depth: int = 2) -> tuple[str | None, bool]:
+ """Used inside a function to check whether it was called globally.
+
+ Args:
+ depth: The depth to get the frame.
+
+ Returns:
+        A tuple containing `module_name` and `called_globally`.
+
+ Raises:
+        RuntimeError: If this function is not itself called from within another function.
+ """
+ try:
+ previous_caller_frame = sys._getframe(depth)
+ except ValueError as e:
+ raise RuntimeError('This function must be used inside another function') from e
+ except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it
+ return None, False
+ frame_globals = previous_caller_frame.f_globals
+ return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals
+
+
+DictValues: type[Any] = {}.values().__class__
+
+
+def iter_contained_typevars(v: Any) -> Iterator[TypeVar]:
+ """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.
+
+    This is intended as an alternative to directly accessing the `__parameters__` attribute of a GenericAlias,
+ since __parameters__ of (nested) generic BaseModel subclasses won't show up in that list.
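+
+    Example (illustrative; `T` is a hypothetical type variable):
+        ```python
+        from typing import List, TypeVar
+
+        T = TypeVar('T')
+
+        list(iter_contained_typevars(List[T]))  #> [~T]
+        ```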
+ """
+ if isinstance(v, TypeVar):
+ yield v
+ elif is_model_class(v):
+ yield from v.__pydantic_generic_metadata__['parameters']
+ elif isinstance(v, (DictValues, list)):
+ for var in v:
+ yield from iter_contained_typevars(var)
+ else:
+ args = get_args(v)
+ for arg in args:
+ yield from iter_contained_typevars(arg)
+
+
+def get_args(v: Any) -> Any:
+ pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None)
+ if pydantic_generic_metadata:
+ return pydantic_generic_metadata.get('args')
+ return typing_extensions.get_args(v)
+
+
+def get_origin(v: Any) -> Any:
+ pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None)
+ if pydantic_generic_metadata:
+ return pydantic_generic_metadata.get('origin')
+ return typing_extensions.get_origin(v)
+
+
+def get_standard_typevars_map(cls: Any) -> dict[TypeVar, Any] | None:
+ """Package a generic type's typevars and parametrization (if present) into a dictionary compatible with the
+ `replace_types` function. Specifically, this works with standard typing generics and typing._GenericAlias.
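+
+    Example (illustrative; `MyGeneric` and `T` are hypothetical):
+        ```python
+        from typing import Generic, TypeVar
+
+        T = TypeVar('T')
+
+        class MyGeneric(Generic[T]):
+            pass
+
+        get_standard_typevars_map(MyGeneric[int])  #> {~T: <class 'int'>}
+        ```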
+ """
+ origin = get_origin(cls)
+ if origin is None:
+ return None
+ if not hasattr(origin, '__parameters__'):
+ return None
+
+ # In this case, we know that cls is a _GenericAlias, and origin is the generic type
+ # So it is safe to access cls.__args__ and origin.__parameters__
+ args: tuple[Any, ...] = cls.__args__ # type: ignore
+ parameters: tuple[TypeVar, ...] = origin.__parameters__
+ return dict(zip(parameters, args))
+
+
+def get_model_typevars_map(cls: type[BaseModel]) -> dict[TypeVar, Any] | None:
+ """Package a generic BaseModel's typevars and concrete parametrization (if present) into a dictionary compatible
+ with the `replace_types` function.
+
+ Since BaseModel.__class_getitem__ does not produce a typing._GenericAlias, and the BaseModel generic info is
+ stored in the __pydantic_generic_metadata__ attribute, we need special handling here.
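+
+    Example (illustrative; `Model` and `T` are hypothetical):
+        ```python
+        from typing import Generic, TypeVar
+
+        from pydantic import BaseModel
+
+        T = TypeVar('T')
+
+        class Model(BaseModel, Generic[T]):
+            x: T
+
+        get_model_typevars_map(Model[int])  #> {~T: <class 'int'>}
+        ```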
+ """
+ # TODO: This could be unified with `get_standard_typevars_map` if we stored the generic metadata
+ # in the __origin__, __args__, and __parameters__ attributes of the model.
+ generic_metadata = cls.__pydantic_generic_metadata__
+ origin = generic_metadata['origin']
+ args = generic_metadata['args']
+ return dict(zip(iter_contained_typevars(origin), args))
+
+
+def replace_types(type_: Any, type_map: Mapping[Any, Any] | None) -> Any:
+ """Return type with all occurrences of `type_map` keys recursively replaced with their values.
+
+ Args:
+ type_: The class or generic alias.
+ type_map: Mapping from `TypeVar` instance to concrete types.
+
+ Returns:
+ A new type representing the basic structure of `type_` with all
+        `type_map` keys recursively replaced.
+
+ Example:
+ ```python
+ from typing import List, Tuple, Union
+
+ from pydantic._internal._generics import replace_types
+
+ replace_types(Tuple[str, Union[List[str], float]], {str: int})
+ #> Tuple[int, Union[List[int], float]]
+ ```
+ """
+ if not type_map:
+ return type_
+
+ type_args = get_args(type_)
+
+ if _typing_extra.is_annotated(type_):
+ annotated_type, *annotations = type_args
+ annotated = replace_types(annotated_type, type_map)
+ for annotation in annotations:
+ annotated = typing_extensions.Annotated[annotated, annotation]
+ return annotated
+
+ origin_type = get_origin(type_)
+
+ # Having type args is a good indicator that this is a typing special form
+ # instance or a generic alias of some sort.
+ if type_args:
+ resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args)
+ if all_identical(type_args, resolved_type_args):
+ # If all arguments are the same, there is no need to modify the
+ # type or create a new object at all
+ return type_
+
+ if (
+ origin_type is not None
+ and isinstance(type_, _typing_extra.typing_base)
+ and not isinstance(origin_type, _typing_extra.typing_base)
+ and getattr(type_, '_name', None) is not None
+ ):
+ # In python < 3.9 generic aliases don't exist so any of these like `list`,
+ # `type` or `collections.abc.Callable` need to be translated.
+ # See: https://www.python.org/dev/peps/pep-0585
+ origin_type = getattr(typing, type_._name)
+ assert origin_type is not None
+
+ if _typing_extra.origin_is_union(origin_type):
+ if any(_typing_extra.is_any(arg) for arg in resolved_type_args):
+ # `Any | T` ~ `Any`:
+ resolved_type_args = (Any,)
+ # `Never | T` ~ `T`:
+ resolved_type_args = tuple(
+ arg
+ for arg in resolved_type_args
+ if not (_typing_extra.is_no_return(arg) or _typing_extra.is_never(arg))
+ )
+
+ # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__.
+ # We also cannot use isinstance() since we have to compare types.
+ if sys.version_info >= (3, 10) and origin_type is types.UnionType:
+ return _UnionGenericAlias(origin_type, resolved_type_args)
+ # NotRequired[T] and Required[T] don't support tuple type resolved_type_args, hence the condition below
+ return origin_type[resolved_type_args[0] if len(resolved_type_args) == 1 else resolved_type_args]
+
+ # We handle pydantic generic models separately as they don't have the same
+ # semantics as "typing" classes or generic aliases
+
+ if not origin_type and is_model_class(type_):
+ parameters = type_.__pydantic_generic_metadata__['parameters']
+ if not parameters:
+ return type_
+ resolved_type_args = tuple(replace_types(t, type_map) for t in parameters)
+ if all_identical(parameters, resolved_type_args):
+ return type_
+ return type_[resolved_type_args]
+
+ # Handle special case for typehints that can have lists as arguments.
+ # `typing.Callable[[int, str], int]` is an example for this.
+ if isinstance(type_, list):
+ resolved_list = [replace_types(element, type_map) for element in type_]
+ if all_identical(type_, resolved_list):
+ return type_
+ return resolved_list
+
+ # If all else fails, we try to resolve the type directly and otherwise just
+ # return the input with no modifications.
+ return type_map.get(type_, type_)
+
+
+def has_instance_in_type(type_: Any, isinstance_target: Any) -> bool:
+ """Checks if the type, or any of its arbitrary nested args, satisfy
+ `isinstance(<type>, isinstance_target)`.
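+
+    Example (illustrative; `T` is a hypothetical type variable):
+        ```python
+        from typing import List, TypeVar
+
+        T = TypeVar('T')
+
+        has_instance_in_type(List[T], TypeVar)  #> True
+        has_instance_in_type(List[int], TypeVar)  #> False
+        ```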
+ """
+ if isinstance(type_, isinstance_target):
+ return True
+ if _typing_extra.is_annotated(type_):
+ return has_instance_in_type(type_.__origin__, isinstance_target)
+ if _typing_extra.is_literal(type_):
+ return False
+
+ type_args = get_args(type_)
+
+ # Having type args is a good indicator that this is a typing module
+ # class instantiation or a generic alias of some sort.
+ for arg in type_args:
+ if has_instance_in_type(arg, isinstance_target):
+ return True
+
+ # Handle special case for typehints that can have lists as arguments.
+ # `typing.Callable[[int, str], int]` is an example for this.
+ if (
+ isinstance(type_, list)
+ # On Python < 3.10, typing_extensions implements `ParamSpec` as a subclass of `list`:
+ and not isinstance(type_, typing_extensions.ParamSpec)
+ ):
+ for element in type_:
+ if has_instance_in_type(element, isinstance_target):
+ return True
+
+ return False
+
+
+def check_parameters_count(cls: type[BaseModel], parameters: tuple[Any, ...]) -> None:
+ """Check the generic model parameters count is equal.
+
+ Args:
+ cls: The generic model.
+ parameters: A tuple of passed parameters to the generic model.
+
+ Raises:
+        TypeError: If the number of passed parameters does not match the generic model's parameter count.
+ """
+ actual = len(parameters)
+ expected = len(cls.__pydantic_generic_metadata__['parameters'])
+ if actual != expected:
+ description = 'many' if actual > expected else 'few'
+ raise TypeError(f'Too {description} parameters for {cls}; actual {actual}, expected {expected}')
+
+
+_generic_recursion_cache: ContextVar[set[str] | None] = ContextVar('_generic_recursion_cache', default=None)
+
+
+@contextmanager
+def generic_recursion_self_type(
+ origin: type[BaseModel], args: tuple[Any, ...]
+) -> Iterator[PydanticRecursiveRef | None]:
+ """This contextmanager should be placed around the recursive calls used to build a generic type,
+ and accept as arguments the generic origin type and the type arguments being passed to it.
+
+ If the same origin and arguments are observed twice, it implies that a self-reference placeholder
+ can be used while building the core schema, and will produce a schema_ref that will be valid in the
+ final parent schema.
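+
+    Usage sketch (illustrative; `build_schema`, `origin` and `args` are hypothetical):
+        ```python
+        from pydantic_core import core_schema
+
+        with generic_recursion_self_type(origin, args) as self_type:
+            if self_type is not None:
+                # second visit with the same (origin, args): emit a reference
+                # instead of recursing forever
+                schema = core_schema.definition_reference_schema(self_type.type_ref)
+            else:
+                schema = build_schema(origin, args)  # may recurse back through here
+        ```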
+ """
+ previously_seen_type_refs = _generic_recursion_cache.get()
+ if previously_seen_type_refs is None:
+ previously_seen_type_refs = set()
+ token = _generic_recursion_cache.set(previously_seen_type_refs)
+ else:
+ token = None
+
+ try:
+ type_ref = get_type_ref(origin, args_override=args)
+ if type_ref in previously_seen_type_refs:
+ self_type = PydanticRecursiveRef(type_ref=type_ref)
+ yield self_type
+ else:
+ previously_seen_type_refs.add(type_ref)
+ yield
+ previously_seen_type_refs.remove(type_ref)
+ finally:
+ if token:
+ _generic_recursion_cache.reset(token)
+
+
+def recursively_defined_type_refs() -> set[str]:
+ visited = _generic_recursion_cache.get()
+ if not visited:
+ return set() # not in a generic recursion, so there are no types
+
+ return visited.copy() # don't allow modifications
+
+
+def get_cached_generic_type_early(parent: type[BaseModel], typevar_values: Any) -> type[BaseModel] | None:
+ """The use of a two-stage cache lookup approach was necessary to have the highest performance possible for
+    repeated calls to `__class_getitem__` on generic types (which may happen in tight loops at runtime),
+ while still ensuring that certain alternative parametrizations ultimately resolve to the same type.
+
+ As a concrete example, this approach was necessary to make Model[List[T]][int] equal to Model[List[int]].
+ The approach could be modified to not use two different cache keys at different points, but the
+ _early_cache_key is optimized to be as quick to compute as possible (for repeated-access speed), and the
+ _late_cache_key is optimized to be as "correct" as possible, so that two types that will ultimately be the
+ same after resolving the type arguments will always produce cache hits.
+
+ If we wanted to move to only using a single cache key per type, we would either need to always use the
+ slower/more computationally intensive logic associated with _late_cache_key, or would need to accept
+    that Model[List[T]][int] is a different type than Model[List[int]]. Because we rely on subclass relationships
+ during validation, I think it is worthwhile to ensure that types that are functionally equivalent are actually
+ equal.
+ """
+ return _GENERIC_TYPES_CACHE.get(_early_cache_key(parent, typevar_values))
+
+
+def get_cached_generic_type_late(
+ parent: type[BaseModel], typevar_values: Any, origin: type[BaseModel], args: tuple[Any, ...]
+) -> type[BaseModel] | None:
+ """See the docstring of `get_cached_generic_type_early` for more information about the two-stage cache lookup."""
+ cached = _GENERIC_TYPES_CACHE.get(_late_cache_key(origin, args, typevar_values))
+ if cached is not None:
+ set_cached_generic_type(parent, typevar_values, cached, origin, args)
+ return cached
+
+
+def set_cached_generic_type(
+ parent: type[BaseModel],
+ typevar_values: tuple[Any, ...],
+ type_: type[BaseModel],
+ origin: type[BaseModel] | None = None,
+ args: tuple[Any, ...] | None = None,
+) -> None:
+ """See the docstring of `get_cached_generic_type_early` for more information about why items are cached with
+ two different keys.
+ """
+ _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values)] = type_
+ if len(typevar_values) == 1:
+ _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values[0])] = type_
+ if origin and args:
+ _GENERIC_TYPES_CACHE[_late_cache_key(origin, args, typevar_values)] = type_
+
+
+def _union_orderings_key(typevar_values: Any) -> Any:
+ """This is intended to help differentiate between Union types with the same arguments in different order.
+
+ Thanks to caching internal to the `typing` module, it is not possible to distinguish between
+ List[Union[int, float]] and List[Union[float, int]] (and similarly for other "parent" origins besides List)
+ because `typing` considers Union[int, float] to be equal to Union[float, int].
+
+ However, you _can_ distinguish between (top-level) Union[int, float] vs. Union[float, int].
+    Because an input is parsed as the first member of the Union that validates successfully, we get slightly more consistent behavior
+ if we make an effort to distinguish the ordering of items in a union. It would be best if we could _always_
+ get the exact-correct order of items in the union, but that would require a change to the `typing` module itself.
+ (See https://github.com/python/cpython/issues/86483 for reference.)
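+
+    Example (illustrative):
+        ```python
+        from typing import Union
+
+        _union_orderings_key(Union[int, str])  #> (<class 'int'>, <class 'str'>)
+        _union_orderings_key((Union[int, str], float))  #> ((<class 'int'>, <class 'str'>), ())
+        ```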
+ """
+ if isinstance(typevar_values, tuple):
+ args_data = []
+ for value in typevar_values:
+ args_data.append(_union_orderings_key(value))
+ return tuple(args_data)
+ elif _typing_extra.is_union(typevar_values):
+ return get_args(typevar_values)
+ else:
+ return ()
+
+
+def _early_cache_key(cls: type[BaseModel], typevar_values: Any) -> GenericTypesCacheKey:
+ """This is intended for minimal computational overhead during lookups of cached types.
+
+ Note that this is overly simplistic, and it's possible that two different cls/typevar_values
+ inputs would ultimately result in the same type being created in BaseModel.__class_getitem__.
+ To handle this, we have a fallback _late_cache_key that is checked later if the _early_cache_key
+ lookup fails, and should result in a cache hit _precisely_ when the inputs to __class_getitem__
+ would result in the same type.
+ """
+ return cls, typevar_values, _union_orderings_key(typevar_values)
+
+
+def _late_cache_key(origin: type[BaseModel], args: tuple[Any, ...], typevar_values: Any) -> GenericTypesCacheKey:
+ """This is intended for use later in the process of creating a new type, when we have more information
+ about the exact args that will be passed. If it turns out that a different set of inputs to
+ __class_getitem__ resulted in the same inputs to the generic type creation process, we can still
+ return the cached type, and update the cache with the _early_cache_key as well.
+ """
+ # The _union_orderings_key is placed at the start here to ensure there cannot be a collision with an
+ # _early_cache_key, as that function will always produce a BaseModel subclass as the first item in the key,
+ # whereas this function will always produce a tuple as the first item in the key.
+ return _union_orderings_key(typevar_values), origin, args
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_git.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_git.py
new file mode 100644
index 00000000..bff0dca3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_git.py
@@ -0,0 +1,27 @@
+"""Git utilities, adopted from mypy's git utilities (https://github.com/python/mypy/blob/master/mypy/git.py)."""
+
+from __future__ import annotations
+
+import os
+import subprocess
+
+
+def is_git_repo(dir: str) -> bool:
+ """Is the given directory version-controlled with git?"""
+ return os.path.exists(os.path.join(dir, '.git'))
+
+
+def have_git() -> bool:
+ """Can we run the git executable?"""
+ try:
+ subprocess.check_output(['git', '--help'])
+ return True
+ except subprocess.CalledProcessError:
+ return False
+ except OSError:
+ return False
+
+
+def git_revision(dir: str) -> str:
+ """Get the SHA-1 of the HEAD of a git repository."""
+ return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=dir).decode('utf-8').strip()
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_import_utils.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_import_utils.py
new file mode 100644
index 00000000..29748eca
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_import_utils.py
@@ -0,0 +1,20 @@
+from functools import lru_cache
+from typing import TYPE_CHECKING, Type
+
+if TYPE_CHECKING:
+ from pydantic import BaseModel
+ from pydantic.fields import FieldInfo
+
+
+@lru_cache(maxsize=None)
+def import_cached_base_model() -> Type['BaseModel']:
+ from pydantic import BaseModel
+
+ return BaseModel
+
+
+@lru_cache(maxsize=None)
+def import_cached_field_info() -> Type['FieldInfo']:
+ from pydantic.fields import FieldInfo
+
+ return FieldInfo
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_internal_dataclass.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_internal_dataclass.py
new file mode 100644
index 00000000..33e152cc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_internal_dataclass.py
@@ -0,0 +1,7 @@
+import sys
+
+# `slots` is available on Python >= 3.10
+if sys.version_info >= (3, 10):
+ slots_true = {'slots': True}
+else:
+ slots_true = {}
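+
+# Usage sketch (illustrative): spread this into a dataclass decorator so `slots=True`
+# is applied only where supported, e.g. `@dataclasses.dataclass(**slots_true)`.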
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_known_annotated_metadata.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_known_annotated_metadata.py
new file mode 100644
index 00000000..78de89ec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_known_annotated_metadata.py
@@ -0,0 +1,392 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from copy import copy
+from functools import lru_cache, partial
+from typing import TYPE_CHECKING, Any, Iterable
+
+from pydantic_core import CoreSchema, PydanticCustomError, ValidationError, to_jsonable_python
+from pydantic_core import core_schema as cs
+
+from ._fields import PydanticMetadata
+from ._import_utils import import_cached_field_info
+
+if TYPE_CHECKING:
+ pass
+
+STRICT = {'strict'}
+FAIL_FAST = {'fail_fast'}
+LENGTH_CONSTRAINTS = {'min_length', 'max_length'}
+INEQUALITY = {'le', 'ge', 'lt', 'gt'}
+NUMERIC_CONSTRAINTS = {'multiple_of', *INEQUALITY}
+ALLOW_INF_NAN = {'allow_inf_nan'}
+
+STR_CONSTRAINTS = {
+ *LENGTH_CONSTRAINTS,
+ *STRICT,
+ 'strip_whitespace',
+ 'to_lower',
+ 'to_upper',
+ 'pattern',
+ 'coerce_numbers_to_str',
+}
+BYTES_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT}
+
+LIST_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST}
+TUPLE_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST}
+SET_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST}
+DICT_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT}
+GENERATOR_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT}
+SEQUENCE_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *FAIL_FAST}
+
+FLOAT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *ALLOW_INF_NAN, *STRICT}
+DECIMAL_CONSTRAINTS = {'max_digits', 'decimal_places', *FLOAT_CONSTRAINTS}
+INT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *ALLOW_INF_NAN, *STRICT}
+BOOL_CONSTRAINTS = STRICT
+UUID_CONSTRAINTS = STRICT
+
+DATE_TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
+TIMEDELTA_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
+TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
+LAX_OR_STRICT_CONSTRAINTS = STRICT
+ENUM_CONSTRAINTS = STRICT
+COMPLEX_CONSTRAINTS = STRICT
+
+UNION_CONSTRAINTS = {'union_mode'}
+URL_CONSTRAINTS = {
+ 'max_length',
+ 'allowed_schemes',
+ 'host_required',
+ 'default_host',
+ 'default_port',
+ 'default_path',
+}
+
+TEXT_SCHEMA_TYPES = ('str', 'bytes', 'url', 'multi-host-url')
+SEQUENCE_SCHEMA_TYPES = ('list', 'tuple', 'set', 'frozenset', 'generator', *TEXT_SCHEMA_TYPES)
+NUMERIC_SCHEMA_TYPES = ('float', 'int', 'date', 'time', 'timedelta', 'datetime')
+
+CONSTRAINTS_TO_ALLOWED_SCHEMAS: dict[str, set[str]] = defaultdict(set)
+
+constraint_schema_pairings: list[tuple[set[str], tuple[str, ...]]] = [
+ (STR_CONSTRAINTS, TEXT_SCHEMA_TYPES),
+ (BYTES_CONSTRAINTS, ('bytes',)),
+ (LIST_CONSTRAINTS, ('list',)),
+ (TUPLE_CONSTRAINTS, ('tuple',)),
+ (SET_CONSTRAINTS, ('set', 'frozenset')),
+ (DICT_CONSTRAINTS, ('dict',)),
+ (GENERATOR_CONSTRAINTS, ('generator',)),
+ (FLOAT_CONSTRAINTS, ('float',)),
+ (INT_CONSTRAINTS, ('int',)),
+ (DATE_TIME_CONSTRAINTS, ('date', 'time', 'datetime', 'timedelta')),
+ # TODO: this is a bit redundant, we could probably avoid some of these
+ (STRICT, (*TEXT_SCHEMA_TYPES, *SEQUENCE_SCHEMA_TYPES, *NUMERIC_SCHEMA_TYPES, 'typed-dict', 'model')),
+ (UNION_CONSTRAINTS, ('union',)),
+ (URL_CONSTRAINTS, ('url', 'multi-host-url')),
+ (BOOL_CONSTRAINTS, ('bool',)),
+ (UUID_CONSTRAINTS, ('uuid',)),
+ (LAX_OR_STRICT_CONSTRAINTS, ('lax-or-strict',)),
+ (ENUM_CONSTRAINTS, ('enum',)),
+ (DECIMAL_CONSTRAINTS, ('decimal',)),
+ (COMPLEX_CONSTRAINTS, ('complex',)),
+]
+
+for constraints, schemas in constraint_schema_pairings:
+ for c in constraints:
+ CONSTRAINTS_TO_ALLOWED_SCHEMAS[c].update(schemas)
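+
+# Illustrative result (sketch): after the loop above, each constraint maps to the schema
+# types that accept it directly, e.g. CONSTRAINTS_TO_ALLOWED_SCHEMAS['min_length'] includes
+# 'str', 'bytes', 'list', 'tuple', 'set', 'frozenset', 'dict' and 'generator'.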
+
+
+def as_jsonable_value(v: Any) -> Any:
+ if type(v) not in (int, str, float, bytes, bool, type(None)):
+ return to_jsonable_python(v)
+ return v
+
+
+def expand_grouped_metadata(annotations: Iterable[Any]) -> Iterable[Any]:
+ """Expand the annotations.
+
+ Args:
+ annotations: An iterable of annotations.
+
+ Returns:
+ An iterable of expanded annotations.
+
+ Example:
+ ```python
+ from annotated_types import Ge, Len
+
+ from pydantic._internal._known_annotated_metadata import expand_grouped_metadata
+
+ print(list(expand_grouped_metadata([Ge(4), Len(5)])))
+ #> [Ge(ge=4), MinLen(min_length=5)]
+ ```
+ """
+ import annotated_types as at
+
+ FieldInfo = import_cached_field_info()
+
+ for annotation in annotations:
+ if isinstance(annotation, at.GroupedMetadata):
+ yield from annotation
+ elif isinstance(annotation, FieldInfo):
+ yield from annotation.metadata
+ # this is a bit problematic in that it results in duplicate metadata
+ # all of our "consumers" can handle it, but it is not ideal
+ # we probably should split up FieldInfo into:
+ # - annotated types metadata
+ # - individual metadata known only to Pydantic
+ annotation = copy(annotation)
+ annotation.metadata = []
+ yield annotation
+ else:
+ yield annotation
+
+
+@lru_cache
+def _get_at_to_constraint_map() -> dict[type, str]:
+ """Return a mapping of annotated types to constraints.
+
+ Normally, we would define a mapping like this in the module scope, but we can't do that
+ because we don't permit module level imports of `annotated_types`, in an attempt to speed up
+ the import time of `pydantic`. We still only want to have this dictionary defined in one place,
+ so we use this function to cache the result.
+ """
+ import annotated_types as at
+
+ return {
+ at.Gt: 'gt',
+ at.Ge: 'ge',
+ at.Lt: 'lt',
+ at.Le: 'le',
+ at.MultipleOf: 'multiple_of',
+ at.MinLen: 'min_length',
+ at.MaxLen: 'max_length',
+ }
+
+
+def apply_known_metadata(annotation: Any, schema: CoreSchema) -> CoreSchema | None: # noqa: C901
+ """Apply `annotation` to `schema` if it is an annotation we know about (Gt, Le, etc.).
+ Otherwise return `None`.
+
+ This does not handle all known annotations. If / when it does, it can always
+ return a CoreSchema and return the unmodified schema if the annotation should be ignored.
+
+ Assumes that GroupedMetadata has already been expanded via `expand_grouped_metadata`.
+
+ Args:
+ annotation: The annotation.
+ schema: The schema.
+
+ Returns:
+ An updated schema with annotation if it is an annotation we know about, `None` otherwise.
+
+ Raises:
+ PydanticCustomError: If `Predicate` fails.
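+
+    Example (illustrative):
+        ```python
+        import annotated_types as at
+        from pydantic_core import core_schema as cs
+
+        apply_known_metadata(at.Gt(0), cs.int_schema())
+        #> {'type': 'int', 'gt': 0}
+        ```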
+ """
+ import annotated_types as at
+
+ from ._validators import NUMERIC_VALIDATOR_LOOKUP, forbid_inf_nan_check
+
+ schema = schema.copy()
+ schema_update, other_metadata = collect_known_metadata([annotation])
+ schema_type = schema['type']
+
+ chain_schema_constraints: set[str] = {
+ 'pattern',
+ 'strip_whitespace',
+ 'to_lower',
+ 'to_upper',
+ 'coerce_numbers_to_str',
+ }
+ chain_schema_steps: list[CoreSchema] = []
+
+ for constraint, value in schema_update.items():
+ if constraint not in CONSTRAINTS_TO_ALLOWED_SCHEMAS:
+ raise ValueError(f'Unknown constraint {constraint}')
+ allowed_schemas = CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint]
+
+ # if it becomes necessary to handle more than one constraint
+ # in this recursive case with function-after or function-wrap, we should refactor
+ # this is a bit challenging because we sometimes want to apply constraints to the inner schema,
+ # whereas other times we want to wrap the existing schema with a new one that enforces a new constraint.
+ if schema_type in {'function-before', 'function-wrap', 'function-after'} and constraint == 'strict':
+ schema['schema'] = apply_known_metadata(annotation, schema['schema']) # type: ignore # schema is function schema
+ return schema
+
+ # if we're allowed to apply constraint directly to the schema, like le to int, do that
+ if schema_type in allowed_schemas:
+ if constraint == 'union_mode' and schema_type == 'union':
+ schema['mode'] = value # type: ignore # schema is UnionSchema
+ else:
+ schema[constraint] = value
+ continue
+
+ # else, apply a function after validator to the schema to enforce the corresponding constraint
+ if constraint in chain_schema_constraints:
+
+ def _apply_constraint_with_incompatibility_info(
+ value: Any, handler: cs.ValidatorFunctionWrapHandler
+ ) -> Any:
+ try:
+ x = handler(value)
+ except ValidationError as ve:
+                        # if the error is about the type, it's likely that the constraint is incompatible with the type of the field
+ # for example, the following invalid schema wouldn't be caught during schema build, but rather at this point
+ # with a cryptic 'string_type' error coming from the string validator,
+ # that we'd rather express as a constraint incompatibility error (TypeError)
+ # Annotated[list[int], Field(pattern='abc')]
+ if 'type' in ve.errors()[0]['type']:
+ raise TypeError(
+ f"Unable to apply constraint '{constraint}' to supplied value {value} for schema of type '{schema_type}'" # noqa: B023
+ )
+ raise ve
+ return x
+
+ chain_schema_steps.append(
+ cs.no_info_wrap_validator_function(
+ _apply_constraint_with_incompatibility_info, cs.str_schema(**{constraint: value})
+ )
+ )
+ elif constraint in NUMERIC_VALIDATOR_LOOKUP:
+ if constraint in LENGTH_CONSTRAINTS:
+ inner_schema = schema
+ while inner_schema['type'] in {'function-before', 'function-wrap', 'function-after'}:
+ inner_schema = inner_schema['schema'] # type: ignore
+ inner_schema_type = inner_schema['type']
+ if inner_schema_type == 'list' or (
+ inner_schema_type == 'json-or-python' and inner_schema['json_schema']['type'] == 'list' # type: ignore
+ ):
+ js_constraint_key = 'minItems' if constraint == 'min_length' else 'maxItems'
+ else:
+ js_constraint_key = 'minLength' if constraint == 'min_length' else 'maxLength'
+ else:
+ js_constraint_key = constraint
+
+ schema = cs.no_info_after_validator_function(
+ partial(NUMERIC_VALIDATOR_LOOKUP[constraint], **{constraint: value}), schema
+ )
+ metadata = schema.get('metadata', {})
+ if (existing_json_schema_updates := metadata.get('pydantic_js_updates')) is not None:
+ metadata['pydantic_js_updates'] = {
+ **existing_json_schema_updates,
+ **{js_constraint_key: as_jsonable_value(value)},
+ }
+ else:
+ metadata['pydantic_js_updates'] = {js_constraint_key: as_jsonable_value(value)}
+ schema['metadata'] = metadata
+ elif constraint == 'allow_inf_nan' and value is False:
+ schema = cs.no_info_after_validator_function(
+ forbid_inf_nan_check,
+ schema,
+ )
+ else:
+ # It's rare that we'd get here, but it's possible if we add a new constraint and forget to handle it
+ # Most constraint errors are caught at runtime during attempted application
+ raise RuntimeError(f"Unable to apply constraint '{constraint}' to schema of type '{schema_type}'")
+
+ for annotation in other_metadata:
+ if (annotation_type := type(annotation)) in (at_to_constraint_map := _get_at_to_constraint_map()):
+ constraint = at_to_constraint_map[annotation_type]
+ validator = NUMERIC_VALIDATOR_LOOKUP.get(constraint)
+ if validator is None:
+ raise ValueError(f'Unknown constraint {constraint}')
+ schema = cs.no_info_after_validator_function(
+                partial(validator, **{constraint: getattr(annotation, constraint)}), schema
+ )
+ continue
+ elif isinstance(annotation, (at.Predicate, at.Not)):
+ predicate_name = f'{annotation.func.__qualname__}' if hasattr(annotation.func, '__qualname__') else ''
+
+ def val_func(v: Any) -> Any:
+ predicate_satisfied = annotation.func(v) # noqa: B023
+
+ # annotation.func may also raise an exception, let it pass through
+ if isinstance(annotation, at.Predicate): # noqa: B023
+ if not predicate_satisfied:
+ raise PydanticCustomError(
+ 'predicate_failed',
+ f'Predicate {predicate_name} failed', # type: ignore # noqa: B023
+ )
+ else:
+ if predicate_satisfied:
+ raise PydanticCustomError(
+ 'not_operation_failed',
+ f'Not of {predicate_name} failed', # type: ignore # noqa: B023
+ )
+
+ return v
+
+ schema = cs.no_info_after_validator_function(val_func, schema)
+ else:
+ # ignore any other unknown metadata
+ return None
+
+ if chain_schema_steps:
+ chain_schema_steps = [schema] + chain_schema_steps
+ return cs.chain_schema(chain_schema_steps)
+
+ return schema
+
+
+def collect_known_metadata(annotations: Iterable[Any]) -> tuple[dict[str, Any], list[Any]]:
+ """Split `annotations` into known metadata and unknown annotations.
+
+ Args:
+ annotations: An iterable of annotations.
+
+ Returns:
+        A tuple containing a dict of known metadata and a list of unknown annotations.
+
+ Example:
+ ```python
+ from annotated_types import Gt, Len
+
+ from pydantic._internal._known_annotated_metadata import collect_known_metadata
+
+ print(collect_known_metadata([Gt(1), Len(42), ...]))
+ #> ({'gt': 1, 'min_length': 42}, [Ellipsis])
+ ```
+ """
+ annotations = expand_grouped_metadata(annotations)
+
+ res: dict[str, Any] = {}
+ remaining: list[Any] = []
+
+ for annotation in annotations:
+ # isinstance(annotation, PydanticMetadata) also covers ._fields:_PydanticGeneralMetadata
+ if isinstance(annotation, PydanticMetadata):
+ res.update(annotation.__dict__)
+ # we don't use dataclasses.asdict because that recursively calls asdict on the field values
+ elif (annotation_type := type(annotation)) in (at_to_constraint_map := _get_at_to_constraint_map()):
+ constraint = at_to_constraint_map[annotation_type]
+ res[constraint] = getattr(annotation, constraint)
+ elif isinstance(annotation, type) and issubclass(annotation, PydanticMetadata):
+ # also support PydanticMetadata classes being used without initialisation,
+ # e.g. `Annotated[int, Strict]` as well as `Annotated[int, Strict()]`
+ res.update({k: v for k, v in vars(annotation).items() if not k.startswith('_')})
+ else:
+ remaining.append(annotation)
+ # Nones can sneak in but pydantic-core will reject them
+ # it'd be nice to clean things up so we don't put in None (we probably don't _need_ to, it was just easier)
+ # but this is simple enough to kick that can down the road
+ res = {k: v for k, v in res.items() if v is not None}
+ return res, remaining
+
+
+def check_metadata(metadata: dict[str, Any], allowed: Iterable[str], source_type: Any) -> None:
+ """A small utility function to validate that the given metadata can be applied to the target.
+ More than saving lines of code, this gives us a consistent error message for all of our internal implementations.
+
+ Args:
+ metadata: A dict of metadata.
+ allowed: An iterable of allowed metadata.
+ source_type: The source type.
+
+ Raises:
+        TypeError: If there is metadata that can't be applied to the source type.
+ """
+ unknown = metadata.keys() - set(allowed)
+ if unknown:
+ raise TypeError(
+ f'The following constraints cannot be applied to {source_type!r}: {", ".join([f"{k!r}" for k in unknown])}'
+ )
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_mock_val_ser.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_mock_val_ser.py
new file mode 100644
index 00000000..624c68e4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_mock_val_ser.py
@@ -0,0 +1,235 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Callable, Generic, Iterator, Mapping, TypeVar, Union
+
+from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator
+from typing_extensions import Literal
+
+from ..errors import PydanticErrorCodes, PydanticUserError
+from ..plugin._schema_validator import PluggableSchemaValidator
+
+if TYPE_CHECKING:
+ from ..dataclasses import PydanticDataclass
+ from ..main import BaseModel
+ from ..type_adapter import TypeAdapter
+
+
+ValSer = TypeVar('ValSer', bound=Union[SchemaValidator, PluggableSchemaValidator, SchemaSerializer])
+T = TypeVar('T')
+
+
+class MockCoreSchema(Mapping[str, Any]):
+ """Mocker for `pydantic_core.CoreSchema` which optionally attempts to
+ rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails.
+ """
+
+ __slots__ = '_error_message', '_code', '_attempt_rebuild', '_built_memo'
+
+ def __init__(
+ self,
+ error_message: str,
+ *,
+ code: PydanticErrorCodes,
+ attempt_rebuild: Callable[[], CoreSchema | None] | None = None,
+ ) -> None:
+ self._error_message = error_message
+ self._code: PydanticErrorCodes = code
+ self._attempt_rebuild = attempt_rebuild
+ self._built_memo: CoreSchema | None = None
+
+ def __getitem__(self, key: str) -> Any:
+ return self._get_built().__getitem__(key)
+
+ def __len__(self) -> int:
+ return self._get_built().__len__()
+
+ def __iter__(self) -> Iterator[str]:
+ return self._get_built().__iter__()
+
+ def _get_built(self) -> CoreSchema:
+ if self._built_memo is not None:
+ return self._built_memo
+
+ if self._attempt_rebuild:
+ schema = self._attempt_rebuild()
+ if schema is not None:
+ self._built_memo = schema
+ return schema
+ raise PydanticUserError(self._error_message, code=self._code)
+
+ def rebuild(self) -> CoreSchema | None:
+ self._built_memo = None
+ if self._attempt_rebuild:
+ schema = self._attempt_rebuild()
+ if schema is not None:
+ return schema
+ else:
+ raise PydanticUserError(self._error_message, code=self._code)
+ return None
+
+
+class MockValSer(Generic[ValSer]):
+ """Mocker for `pydantic_core.SchemaValidator` or `pydantic_core.SchemaSerializer` which optionally attempts to
+ rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails.
+ """
+
+ __slots__ = '_error_message', '_code', '_val_or_ser', '_attempt_rebuild'
+
+ def __init__(
+ self,
+ error_message: str,
+ *,
+ code: PydanticErrorCodes,
+ val_or_ser: Literal['validator', 'serializer'],
+ attempt_rebuild: Callable[[], ValSer | None] | None = None,
+ ) -> None:
+ self._error_message = error_message
+ self._val_or_ser = SchemaValidator if val_or_ser == 'validator' else SchemaSerializer
+ self._code: PydanticErrorCodes = code
+ self._attempt_rebuild = attempt_rebuild
+
+ def __getattr__(self, item: str) -> None:
+ __tracebackhide__ = True
+ if self._attempt_rebuild:
+ val_ser = self._attempt_rebuild()
+ if val_ser is not None:
+ return getattr(val_ser, item)
+
+ # raise an AttributeError if `item` doesn't exist
+ getattr(self._val_or_ser, item)
+ raise PydanticUserError(self._error_message, code=self._code)
+
+ def rebuild(self) -> ValSer | None:
+ if self._attempt_rebuild:
+ val_ser = self._attempt_rebuild()
+ if val_ser is not None:
+ return val_ser
+ else:
+ raise PydanticUserError(self._error_message, code=self._code)
+ return None
+
+
+def set_type_adapter_mocks(adapter: TypeAdapter, type_repr: str) -> None:
+ """Set `core_schema`, `validator` and `serializer` to mock core types on a type adapter instance.
+
+ Args:
+ adapter: The type adapter instance to set the mocks on
+ type_repr: Name of the type used in the adapter, used in error messages
+ """
+ undefined_type_error_message = (
+ f'`TypeAdapter[{type_repr}]` is not fully defined; you should define `{type_repr}` and all referenced types,'
+ f' then call `.rebuild()` on the instance.'
+ )
+
+ def attempt_rebuild_fn(attr_fn: Callable[[TypeAdapter], T]) -> Callable[[], T | None]:
+ def handler() -> T | None:
+ if adapter.rebuild(raise_errors=False, _parent_namespace_depth=5) is not False:
+ return attr_fn(adapter)
+ else:
+ return None
+
+ return handler
+
+ adapter.core_schema = MockCoreSchema( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ attempt_rebuild=attempt_rebuild_fn(lambda ta: ta.core_schema),
+ )
+ adapter.validator = MockValSer( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ val_or_ser='validator',
+ attempt_rebuild=attempt_rebuild_fn(lambda ta: ta.validator),
+ )
+ adapter.serializer = MockValSer( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ val_or_ser='serializer',
+ attempt_rebuild=attempt_rebuild_fn(lambda ta: ta.serializer),
+ )
+
+
+def set_model_mocks(cls: type[BaseModel], cls_name: str, undefined_name: str = 'all referenced types') -> None:
+ """Set `__pydantic_core_schema__`, `__pydantic_validator__` and `__pydantic_serializer__` to mock core types on a model.
+
+ Args:
+ cls: The model class to set the mocks on
+ cls_name: Name of the model class, used in error messages
+ undefined_name: Name of the undefined thing, used in error messages
+ """
+ undefined_type_error_message = (
+ f'`{cls_name}` is not fully defined; you should define {undefined_name},'
+ f' then call `{cls_name}.model_rebuild()`.'
+ )
+
+ def attempt_rebuild_fn(attr_fn: Callable[[type[BaseModel]], T]) -> Callable[[], T | None]:
+ def handler() -> T | None:
+ if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False:
+ return attr_fn(cls)
+ else:
+ return None
+
+ return handler
+
+ cls.__pydantic_core_schema__ = MockCoreSchema( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_core_schema__),
+ )
+ cls.__pydantic_validator__ = MockValSer( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ val_or_ser='validator',
+ attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_validator__),
+ )
+ cls.__pydantic_serializer__ = MockValSer( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ val_or_ser='serializer',
+ attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_serializer__),
+ )
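+
+
+# Illustrative effect (sketch): after `set_model_mocks(MyModel, 'MyModel')` above, accessing
+# `MyModel.__pydantic_validator__` first retries `MyModel.model_rebuild()`; if the model
+# still cannot be rebuilt, a `PydanticUserError` with code 'class-not-fully-defined' is raised.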
+
+
+def set_dataclass_mocks(
+ cls: type[PydanticDataclass], cls_name: str, undefined_name: str = 'all referenced types'
+) -> None:
+ """Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a dataclass.
+
+ Args:
+        cls: The dataclass to set the mocks on
+        cls_name: Name of the dataclass, used in error messages
+ undefined_name: Name of the undefined thing, used in error messages
+ """
+ from ..dataclasses import rebuild_dataclass
+
+ undefined_type_error_message = (
+ f'`{cls_name}` is not fully defined; you should define {undefined_name},'
+ f' then call `pydantic.dataclasses.rebuild_dataclass({cls_name})`.'
+ )
+
+ def attempt_rebuild_fn(attr_fn: Callable[[type[PydanticDataclass]], T]) -> Callable[[], T | None]:
+ def handler() -> T | None:
+ if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False:
+ return attr_fn(cls)
+ else:
+ return None
+
+ return handler
+
+ cls.__pydantic_core_schema__ = MockCoreSchema( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_core_schema__),
+ )
+ cls.__pydantic_validator__ = MockValSer( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ val_or_ser='validator',
+ attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_validator__),
+ )
+ cls.__pydantic_serializer__ = MockValSer( # pyright: ignore[reportAttributeAccessIssue]
+ undefined_type_error_message,
+ code='class-not-fully-defined',
+ val_or_ser='serializer',
+ attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_serializer__),
+ )
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_model_construction.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_model_construction.py
new file mode 100644
index 00000000..7670ff80
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_model_construction.py
@@ -0,0 +1,792 @@
+"""Private logic for creating models."""
+
+from __future__ import annotations as _annotations
+
+import builtins
+import operator
+import sys
+import typing
+import warnings
+import weakref
+from abc import ABCMeta
+from functools import lru_cache, partial
+from types import FunctionType
+from typing import Any, Callable, Generic, Literal, NoReturn, cast
+
+from pydantic_core import PydanticUndefined, SchemaSerializer
+from typing_extensions import TypeAliasType, dataclass_transform, deprecated, get_args
+
+from ..errors import PydanticUndefinedAnnotation, PydanticUserError
+from ..plugin._schema_validator import create_schema_validator
+from ..warnings import GenericBeforeBaseModelWarning, PydanticDeprecatedSince20
+from ._config import ConfigWrapper
+from ._decorators import DecoratorInfos, PydanticDescriptorProxy, get_attribute_from_bases, unwrap_wrapped_function
+from ._fields import collect_model_fields, is_valid_field_name, is_valid_privateattr_name
+from ._generate_schema import GenerateSchema
+from ._generics import PydanticGenericMetadata, get_model_typevars_map
+from ._import_utils import import_cached_base_model, import_cached_field_info
+from ._mock_val_ser import set_model_mocks
+from ._namespace_utils import NsResolver
+from ._schema_generation_shared import CallbackGetCoreSchemaHandler
+from ._signature import generate_pydantic_signature
+from ._typing_extra import (
+ _make_forward_ref,
+ eval_type_backport,
+ is_annotated,
+ is_classvar_annotation,
+ parent_frame_namespace,
+)
+from ._utils import LazyClassAttribute, SafeGetItemProxy
+
+if typing.TYPE_CHECKING:
+ from ..fields import ComputedFieldInfo, FieldInfo, ModelPrivateAttr
+ from ..fields import Field as PydanticModelField
+ from ..fields import PrivateAttr as PydanticModelPrivateAttr
+ from ..main import BaseModel
+else:
+ # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+ # and https://youtrack.jetbrains.com/issue/PY-51428
+ DeprecationWarning = PydanticDeprecatedSince20
+ PydanticModelField = object()
+ PydanticModelPrivateAttr = object()
+
+object_setattr = object.__setattr__
+
+
+class _ModelNamespaceDict(dict):
+ """A dictionary subclass that intercepts attribute setting on model classes and
+ warns about overriding of decorators.
+ """
+
+ def __setitem__(self, k: str, v: object) -> None:
+ existing: Any = self.get(k, None)
+ if existing and v is not existing and isinstance(existing, PydanticDescriptorProxy):
+ warnings.warn(f'`{k}` overrides an existing Pydantic `{existing.decorator_info.decorator_repr}` decorator')
+
+ return super().__setitem__(k, v)
+
+
+def NoInitField(
+ *,
+ init: Literal[False] = False,
+) -> Any:
+ """Only for typing purposes. Used as default value of `__pydantic_fields_set__`,
+ `__pydantic_extra__`, `__pydantic_private__`, so they could be ignored when
+ synthesizing the `__init__` signature.
+ """
+
+
+@dataclass_transform(kw_only_default=True, field_specifiers=(PydanticModelField, PydanticModelPrivateAttr, NoInitField))
+class ModelMetaclass(ABCMeta):
+ def __new__(
+ mcs,
+ cls_name: str,
+ bases: tuple[type[Any], ...],
+ namespace: dict[str, Any],
+ __pydantic_generic_metadata__: PydanticGenericMetadata | None = None,
+ __pydantic_reset_parent_namespace__: bool = True,
+ _create_model_module: str | None = None,
+ **kwargs: Any,
+ ) -> type:
+ """Metaclass for creating Pydantic models.
+
+ Args:
+ cls_name: The name of the class to be created.
+ bases: The base classes of the class to be created.
+ namespace: The attribute dictionary of the class to be created.
+ __pydantic_generic_metadata__: Metadata for generic models.
+ __pydantic_reset_parent_namespace__: Reset parent namespace.
+ _create_model_module: The module of the class to be created, if created by `create_model`.
+ **kwargs: Catch-all for any other keyword arguments.
+
+ Returns:
+ The new class created by the metaclass.
+ """
+ # Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we rely on the fact
+ # that `BaseModel` itself won't have any bases, but any subclass of it will, to determine whether the `__new__`
+ # call we're in the middle of is for the `BaseModel` class.
+ if bases:
+ base_field_names, class_vars, base_private_attributes = mcs._collect_bases_data(bases)
+
+ config_wrapper = ConfigWrapper.for_model(bases, namespace, kwargs)
+ namespace['model_config'] = config_wrapper.config_dict
+ private_attributes = inspect_namespace(
+ namespace, config_wrapper.ignored_types, class_vars, base_field_names
+ )
+ if private_attributes or base_private_attributes:
+ original_model_post_init = get_model_post_init(namespace, bases)
+ if original_model_post_init is not None:
+ # if there are private_attributes and a model_post_init function, we handle both
+
+ def wrapped_model_post_init(self: BaseModel, context: Any, /) -> None:
+ """We need to both initialize private attributes and call the user-defined model_post_init
+ method.
+ """
+ init_private_attributes(self, context)
+ original_model_post_init(self, context)
+
+ namespace['model_post_init'] = wrapped_model_post_init
+ else:
+ namespace['model_post_init'] = init_private_attributes
+
+ namespace['__class_vars__'] = class_vars
+ namespace['__private_attributes__'] = {**base_private_attributes, **private_attributes}
+
+ cls = cast('type[BaseModel]', super().__new__(mcs, cls_name, bases, namespace, **kwargs))
+ BaseModel_ = import_cached_base_model()
+
+ mro = cls.__mro__
+ if Generic in mro and mro.index(Generic) < mro.index(BaseModel_):
+ warnings.warn(
+ GenericBeforeBaseModelWarning(
+ 'Classes should inherit from `BaseModel` before generic classes (e.g. `typing.Generic[T]`) '
+ 'for pydantic generics to work properly.'
+ ),
+ stacklevel=2,
+ )
+
+ cls.__pydantic_custom_init__ = not getattr(cls.__init__, '__pydantic_base_init__', False)
+ cls.__pydantic_post_init__ = (
+ None if cls.model_post_init is BaseModel_.model_post_init else 'model_post_init'
+ )
+
+ cls.__pydantic_decorators__ = DecoratorInfos.build(cls)
+
+ # Use the getattr below to grab the __parameters__ from the `typing.Generic` parent class
+ if __pydantic_generic_metadata__:
+ cls.__pydantic_generic_metadata__ = __pydantic_generic_metadata__
+ else:
+ parent_parameters = getattr(cls, '__pydantic_generic_metadata__', {}).get('parameters', ())
+ parameters = getattr(cls, '__parameters__', None) or parent_parameters
+ if parameters and parent_parameters and not all(x in parameters for x in parent_parameters):
+ from ..root_model import RootModelRootType
+
+ missing_parameters = tuple(x for x in parameters if x not in parent_parameters)
+ if RootModelRootType in parent_parameters and RootModelRootType not in parameters:
+ # This is a special case where the user has subclassed `RootModel`, but has not parametrized
+ # RootModel with the generic type identifiers being used. Ex:
+ # class MyModel(RootModel, Generic[T]):
+ # root: T
+ # Should instead just be:
+ # class MyModel(RootModel[T]):
+ # root: T
+ parameters_str = ', '.join([x.__name__ for x in missing_parameters])
+ error_message = (
+ f'{cls.__name__} is a subclass of `RootModel`, but does not include the generic type identifier(s) '
+ f'{parameters_str} in its parameters. '
+ f'You should parametrize RootModel directly, e.g., `class {cls.__name__}(RootModel[{parameters_str}]): ...`.'
+ )
+ else:
+ combined_parameters = parent_parameters + missing_parameters
+ parameters_str = ', '.join([str(x) for x in combined_parameters])
+ generic_type_label = f'typing.Generic[{parameters_str}]'
+ error_message = (
+ f'All parameters must be present on typing.Generic;'
+ f' you should inherit from {generic_type_label}.'
+ )
+ if Generic not in bases: # pragma: no cover
+ # We raise an error here not because it is desirable, but because some cases are mishandled.
+ # It would be nice to remove this error and still have things behave as expected, it's just
+ # challenging because we are using a custom `__class_getitem__` to parametrize generic models,
+ # and not returning a typing._GenericAlias from it.
+ bases_str = ', '.join([x.__name__ for x in bases] + [generic_type_label])
+ error_message += (
+ f' Note: `typing.Generic` must go last: `class {cls.__name__}({bases_str}): ...`)'
+ )
+ raise TypeError(error_message)
+
+ cls.__pydantic_generic_metadata__ = {
+ 'origin': None,
+ 'args': (),
+ 'parameters': parameters,
+ }
+
+ cls.__pydantic_complete__ = False # Ensure this specific class gets completed
+
+ # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487
+ # for attributes not in `new_namespace` (e.g. private attributes)
+ for name, obj in private_attributes.items():
+ obj.__set_name__(cls, name)
+
+ if __pydantic_reset_parent_namespace__:
+ cls.__pydantic_parent_namespace__ = build_lenient_weakvaluedict(parent_frame_namespace())
+ parent_namespace: dict[str, Any] | None = getattr(cls, '__pydantic_parent_namespace__', None)
+ if isinstance(parent_namespace, dict):
+ parent_namespace = unpack_lenient_weakvaluedict(parent_namespace)
+
+ ns_resolver = NsResolver(parent_namespace=parent_namespace)
+
+ set_model_fields(cls, bases, config_wrapper, ns_resolver)
+
+ if config_wrapper.frozen and '__hash__' not in namespace:
+ set_default_hash_func(cls, bases)
+
+ complete_model_class(
+ cls,
+ cls_name,
+ config_wrapper,
+ raise_errors=False,
+ ns_resolver=ns_resolver,
+ create_model_module=_create_model_module,
+ )
+
+ # If this is placed before the complete_model_class call above,
+ # the generic computed fields return type is set to PydanticUndefined
+ cls.__pydantic_computed_fields__ = {
+ k: v.info for k, v in cls.__pydantic_decorators__.computed_fields.items()
+ }
+
+ set_deprecated_descriptors(cls)
+
+ # using super(cls, cls) on the next line ensures we only call the parent class's __pydantic_init_subclass__
+ # I believe the `type: ignore` is only necessary because mypy doesn't realize that this code branch is
+ # only hit for _proper_ subclasses of BaseModel
+ super(cls, cls).__pydantic_init_subclass__(**kwargs) # type: ignore[misc]
+ return cls
+ else:
+ # These are instance variables, but have been assigned to `NoInitField` to trick the type checker.
+ for instance_slot in '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__':
+ namespace.pop(
+ instance_slot,
+ None, # In case the metaclass is used with a class other than `BaseModel`.
+ )
+ namespace.get('__annotations__', {}).clear()
+ return super().__new__(mcs, cls_name, bases, namespace, **kwargs)
+
+ if not typing.TYPE_CHECKING: # pragma: no branch
+ # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
+
+ def __getattr__(self, item: str) -> Any:
+ """This is necessary to keep attribute access working for class attribute access."""
+ private_attributes = self.__dict__.get('__private_attributes__')
+ if private_attributes and item in private_attributes:
+ return private_attributes[item]
+ raise AttributeError(item)
+
+ @classmethod
+ def __prepare__(cls, *args: Any, **kwargs: Any) -> dict[str, object]:
+ return _ModelNamespaceDict()
+
+ def __instancecheck__(self, instance: Any) -> bool:
+ """Avoid calling ABC _abc_subclasscheck unless we're pretty sure.
+
+ See #3829 and python/cpython#92810
+ """
+ return hasattr(instance, '__pydantic_validator__') and super().__instancecheck__(instance)
+
+ @staticmethod
+ def _collect_bases_data(bases: tuple[type[Any], ...]) -> tuple[set[str], set[str], dict[str, ModelPrivateAttr]]:
+ BaseModel = import_cached_base_model()
+
+ field_names: set[str] = set()
+ class_vars: set[str] = set()
+ private_attributes: dict[str, ModelPrivateAttr] = {}
+ for base in bases:
+ if issubclass(base, BaseModel) and base is not BaseModel:
+ # model_fields might not be defined yet in the case of generics, so we use getattr here:
+ field_names.update(getattr(base, '__pydantic_fields__', {}).keys())
+ class_vars.update(base.__class_vars__)
+ private_attributes.update(base.__private_attributes__)
+ return field_names, class_vars, private_attributes
+
+ @property
+ @deprecated('The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None)
+ def __fields__(self) -> dict[str, FieldInfo]:
+ warnings.warn(
+ 'The `__fields__` attribute is deprecated, use `model_fields` instead.',
+ PydanticDeprecatedSince20,
+ stacklevel=2,
+ )
+ return self.model_fields
+
+ @property
+ def model_fields(self) -> dict[str, FieldInfo]:
+ """Get metadata about the fields defined on the model.
+
+ Returns:
+ A mapping of field names to [`FieldInfo`][pydantic.fields.FieldInfo] objects.
+ """
+ return getattr(self, '__pydantic_fields__', {})
+
+ @property
+ def model_computed_fields(self) -> dict[str, ComputedFieldInfo]:
+ """Get metadata about the computed fields defined on the model.
+
+ Returns:
+ A mapping of computed field names to [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects.
+ """
+ return getattr(self, '__pydantic_computed_fields__', {})
+
+ def __dir__(self) -> list[str]:
+ attributes = list(super().__dir__())
+ if '__fields__' in attributes:
+ attributes.remove('__fields__')
+ return attributes
+
+
+def init_private_attributes(self: BaseModel, context: Any, /) -> None:
+ """This function is meant to behave like a BaseModel method to initialise private attributes.
+
+ It takes context as an argument since that's what pydantic-core passes when calling it.
+
+ Args:
+ self: The BaseModel instance.
+ context: The context.
+ """
+ if getattr(self, '__pydantic_private__', None) is None:
+ pydantic_private = {}
+ for name, private_attr in self.__private_attributes__.items():
+ default = private_attr.get_default()
+ if default is not PydanticUndefined:
+ pydantic_private[name] = default
+ object_setattr(self, '__pydantic_private__', pydantic_private)
+
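+# Illustrative sketch (hypothetical usage, not part of the module): with a
+# declared default, the hook above seeds `__pydantic_private__` after validation:
+#
+#     from pydantic import BaseModel, PrivateAttr
+#
+#     class Client(BaseModel):
+#         _token: str = PrivateAttr(default='n/a')
+#
+#     c = Client()
+#     assert c.__pydantic_private__ == {'_token': 'n/a'}
+#     assert c._token == 'n/a'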
+
+def get_model_post_init(namespace: dict[str, Any], bases: tuple[type[Any], ...]) -> Callable[..., Any] | None:
+ """Get the `model_post_init` method from the namespace or the class bases, or `None` if not defined."""
+ if 'model_post_init' in namespace:
+ return namespace['model_post_init']
+
+ BaseModel = import_cached_base_model()
+
+ model_post_init = get_attribute_from_bases(bases, 'model_post_init')
+ if model_post_init is not BaseModel.model_post_init:
+ return model_post_init
+
+
+def inspect_namespace( # noqa: C901
+ namespace: dict[str, Any],
+ ignored_types: tuple[type[Any], ...],
+ base_class_vars: set[str],
+ base_class_fields: set[str],
+) -> dict[str, ModelPrivateAttr]:
+ """Iterate over the namespace and:
+ * gather private attributes
+ * check for items which look like fields but are not (e.g. have no annotation) and warn.
+
+ Args:
+ namespace: The attribute dictionary of the class to be created.
+ ignored_types: A tuple of ignored types.
+ base_class_vars: A set of base class class variables.
+ base_class_fields: A set of base class fields.
+
+ Returns:
+ A dict containing private attribute info.
+
+ Raises:
+ TypeError: If there is a `__root__` field in the model.
+ NameError: If a private attribute name is invalid.
+ PydanticUserError:
+ - If a field does not have a type annotation.
+ - If a field on base class was overridden by a non-annotated attribute.
+ """
+ from ..fields import ModelPrivateAttr, PrivateAttr
+
+ FieldInfo = import_cached_field_info()
+
+ all_ignored_types = ignored_types + default_ignored_types()
+
+ private_attributes: dict[str, ModelPrivateAttr] = {}
+ raw_annotations = namespace.get('__annotations__', {})
+
+ if '__root__' in raw_annotations or '__root__' in namespace:
+ raise TypeError("To define root models, use `pydantic.RootModel` rather than a field called '__root__'")
+
+ ignored_names: set[str] = set()
+ for var_name, value in list(namespace.items()):
+ if var_name == 'model_config' or var_name == '__pydantic_extra__':
+ continue
+ elif (
+ isinstance(value, type)
+ and value.__module__ == namespace['__module__']
+ and '__qualname__' in namespace
+ and value.__qualname__.startswith(namespace['__qualname__'])
+ ):
+ # `value` is a nested type defined in this namespace; don't error
+ continue
+ elif isinstance(value, all_ignored_types) or value.__class__.__module__ == 'functools':
+ ignored_names.add(var_name)
+ continue
+ elif isinstance(value, ModelPrivateAttr):
+ if var_name.startswith('__'):
+ raise NameError(
+ 'Private attributes must not use dunder names;'
+ f' use a single underscore prefix instead of {var_name!r}.'
+ )
+ elif is_valid_field_name(var_name):
+ raise NameError(
+ 'Private attributes must not use valid field names;'
+ f' use sunder names, e.g. {"_" + var_name!r} instead of {var_name!r}.'
+ )
+ private_attributes[var_name] = value
+ del namespace[var_name]
+ elif isinstance(value, FieldInfo) and not is_valid_field_name(var_name):
+ suggested_name = var_name.lstrip('_') or 'my_field' # don't suggest '' for all-underscore name
+ raise NameError(
+ f'Fields must not use names with leading underscores;'
+ f' e.g., use {suggested_name!r} instead of {var_name!r}.'
+ )
+
+ elif var_name.startswith('__'):
+ continue
+ elif is_valid_privateattr_name(var_name):
+ if var_name not in raw_annotations or not is_classvar_annotation(raw_annotations[var_name]):
+ private_attributes[var_name] = cast(ModelPrivateAttr, PrivateAttr(default=value))
+ del namespace[var_name]
+ elif var_name in base_class_vars:
+ continue
+ elif var_name not in raw_annotations:
+ if var_name in base_class_fields:
+ raise PydanticUserError(
+ f'Field {var_name!r} defined on a base class was overridden by a non-annotated attribute. '
+ f'All field definitions, including overrides, require a type annotation.',
+ code='model-field-overridden',
+ )
+ elif isinstance(value, FieldInfo):
+ raise PydanticUserError(
+ f'Field {var_name!r} requires a type annotation', code='model-field-missing-annotation'
+ )
+ else:
+ raise PydanticUserError(
+ f'A non-annotated attribute was detected: `{var_name} = {value!r}`. All model fields require a '
+ f'type annotation; if `{var_name}` is not meant to be a field, you may be able to resolve this '
+ f"error by annotating it as a `ClassVar` or updating `model_config['ignored_types']`.",
+ code='model-field-missing-annotation',
+ )
+
+ for ann_name, ann_type in raw_annotations.items():
+ if (
+ is_valid_privateattr_name(ann_name)
+ and ann_name not in private_attributes
+ and ann_name not in ignored_names
+ # This condition can be a false negative when `ann_type` is stringified,
+ # but it is handled in most cases in `set_model_fields`:
+ and not is_classvar_annotation(ann_type)
+ and ann_type not in all_ignored_types
+ and getattr(ann_type, '__module__', None) != 'functools'
+ ):
+ if isinstance(ann_type, str):
+ # Walking up the frames to get the module namespace where the model is defined
+ # (as the model class wasn't created yet, we unfortunately can't use `cls.__module__`):
+ frame = sys._getframe(2)
+ if frame is not None:
+ try:
+ ann_type = eval_type_backport(
+ _make_forward_ref(ann_type, is_argument=False, is_class=True),
+ globalns=frame.f_globals,
+ localns=frame.f_locals,
+ )
+ except (NameError, TypeError):
+ pass
+
+ if is_annotated(ann_type):
+ _, *metadata = get_args(ann_type)
+ private_attr = next((v for v in metadata if isinstance(v, ModelPrivateAttr)), None)
+ if private_attr is not None:
+ private_attributes[ann_name] = private_attr
+ continue
+ private_attributes[ann_name] = PrivateAttr()
+
+ return private_attributes
+
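+# Illustrative sketch (hypothetical usage): `inspect_namespace` is what turns a
+# bare `_cache = {}` class attribute into a private attribute, and what rejects
+# non-annotated fields:
+#
+#     from pydantic import BaseModel
+#
+#     class Ok(BaseModel):
+#         _cache = {}  # collected as a private attribute with default `{}`
+#
+#     class Bad(BaseModel):  # raises PydanticUserError: `x` has no annotation
+#         x = 1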
+
+def set_default_hash_func(cls: type[BaseModel], bases: tuple[type[Any], ...]) -> None:
+ base_hash_func = get_attribute_from_bases(bases, '__hash__')
+ new_hash_func = make_hash_func(cls)
+ if base_hash_func in {None, object.__hash__} or getattr(base_hash_func, '__code__', None) == new_hash_func.__code__:
+ # If `__hash__` is some default, we generate a hash function.
+ # It will be `None` if not overridden from BaseModel.
+ # It may be `object.__hash__` if there is another
+ # parent class earlier in the bases which doesn't override `__hash__` (e.g. `typing.Generic`).
+ # It may be a value set by `set_default_hash_func` if `cls` is a subclass of another frozen model.
+ # In the last case we still need a new hash function to account for new `model_fields`.
+ cls.__hash__ = new_hash_func
+
+
+def make_hash_func(cls: type[BaseModel]) -> Any:
+ getter = operator.itemgetter(*cls.__pydantic_fields__.keys()) if cls.__pydantic_fields__ else lambda _: 0
+
+ def hash_func(self: Any) -> int:
+ try:
+ return hash(getter(self.__dict__))
+ except KeyError:
+ # In rare cases (such as when using the deprecated copy method), the __dict__ may not contain
+ # all model fields, which is how we can get here.
+ # getter(self.__dict__) is much faster than any 'safe' method that accounts for missing keys,
+ # and wrapping it in a `try` doesn't slow things down much in the common case.
+ return hash(getter(SafeGetItemProxy(self.__dict__)))
+
+ return hash_func
+
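+# Illustrative sketch (hypothetical usage): for a frozen model with fields `a`
+# and `b`, the generated hash is `hash(itemgetter('a', 'b')(m.__dict__))`:
+#
+#     from pydantic import BaseModel, ConfigDict
+#
+#     class M(BaseModel):
+#         model_config = ConfigDict(frozen=True)
+#         a: int
+#         b: str
+#
+#     m = M(a=1, b='x')
+#     assert hash(m) == hash((1, 'x'))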
+
+def set_model_fields(
+ cls: type[BaseModel],
+ bases: tuple[type[Any], ...],
+ config_wrapper: ConfigWrapper,
+ ns_resolver: NsResolver | None,
+) -> None:
+ """Collect and set `cls.__pydantic_fields__` and `cls.__class_vars__`.
+
+ Args:
+ cls: BaseModel or dataclass.
+ bases: Parents of the class, generally `cls.__bases__`.
+ config_wrapper: The config wrapper instance.
+ ns_resolver: Namespace resolver to use when getting model annotations.
+ """
+ typevars_map = get_model_typevars_map(cls)
+ fields, class_vars = collect_model_fields(cls, bases, config_wrapper, ns_resolver, typevars_map=typevars_map)
+
+ cls.__pydantic_fields__ = fields
+ cls.__class_vars__.update(class_vars)
+
+ for k in class_vars:
+ # Class vars should not be private attributes
+ # We remove them _here_ and not earlier because we rely on inspecting the class to determine its classvars,
+ # but private attributes are determined by inspecting the namespace _prior_ to class creation.
+ # In the case that a classvar with a leading-'_' is defined via a ForwardRef (e.g., when using
+ # `__future__.annotations`), we want to remove the private attribute which was detected _before_ we knew it
+ # evaluated to a classvar
+
+ value = cls.__private_attributes__.pop(k, None)
+ if value is not None and value.default is not PydanticUndefined:
+ setattr(cls, k, value.default)
+
+
+def complete_model_class(
+ cls: type[BaseModel],
+ cls_name: str,
+ config_wrapper: ConfigWrapper,
+ *,
+ raise_errors: bool = True,
+ ns_resolver: NsResolver | None = None,
+ create_model_module: str | None = None,
+) -> bool:
+ """Finish building a model class.
+
+ This logic must be called after the class has been created since validation functions must be bound
+ and `get_type_hints` requires a class object.
+
+ Args:
+ cls: BaseModel or dataclass.
+ cls_name: The model or dataclass name.
+ config_wrapper: The config wrapper instance.
+ raise_errors: Whether to raise errors.
+ ns_resolver: The namespace resolver instance to use during schema building.
+ create_model_module: The module of the class to be created, if created by `create_model`.
+
+ Returns:
+ `True` if the model is successfully completed, else `False`.
+
+ Raises:
+ PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation` occurs in `__get_pydantic_core_schema__`
+ and `raise_errors=True`.
+ """
+ if config_wrapper.defer_build:
+ set_model_mocks(cls, cls_name)
+ return False
+
+ typevars_map = get_model_typevars_map(cls)
+ gen_schema = GenerateSchema(
+ config_wrapper,
+ ns_resolver,
+ typevars_map,
+ )
+
+ handler = CallbackGetCoreSchemaHandler(
+ partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
+ gen_schema,
+ ref_mode='unpack',
+ )
+
+ try:
+ schema = cls.__get_pydantic_core_schema__(cls, handler)
+ except PydanticUndefinedAnnotation as e:
+ if raise_errors:
+ raise
+ set_model_mocks(cls, cls_name, f'`{e.name}`')
+ return False
+
+ core_config = config_wrapper.core_config(title=cls.__name__)
+
+ try:
+ schema = gen_schema.clean_schema(schema)
+ except gen_schema.CollectedInvalid:
+ set_model_mocks(cls, cls_name)
+ return False
+
+ # debug(schema)
+ cls.__pydantic_core_schema__ = schema
+
+ cls.__pydantic_validator__ = create_schema_validator(
+ schema,
+ cls,
+ create_model_module or cls.__module__,
+ cls.__qualname__,
+ 'create_model' if create_model_module else 'BaseModel',
+ core_config,
+ config_wrapper.plugin_settings,
+ )
+ cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)
+ cls.__pydantic_complete__ = True
+
+ # set __signature__ attr only for model class, but not for its instances
+ # (because instances can define `__call__`, and `inspect.signature` shouldn't
+ # use the `__signature__` attribute and instead generate from `__call__`).
+ cls.__signature__ = LazyClassAttribute(
+ '__signature__',
+ partial(
+ generate_pydantic_signature,
+ init=cls.__init__,
+ fields=cls.__pydantic_fields__,
+ populate_by_name=config_wrapper.populate_by_name,
+ extra=config_wrapper.extra,
+ ),
+ )
+ return True
+
+
+def set_deprecated_descriptors(cls: type[BaseModel]) -> None:
+ """Set data descriptors on the class for deprecated fields."""
+ for field, field_info in cls.__pydantic_fields__.items():
+ if (msg := field_info.deprecation_message) is not None:
+ desc = _DeprecatedFieldDescriptor(msg)
+ desc.__set_name__(cls, field)
+ setattr(cls, field, desc)
+
+ for field, computed_field_info in cls.__pydantic_computed_fields__.items():
+ if (
+ (msg := computed_field_info.deprecation_message) is not None
+ # Avoid having two warnings emitted:
+ and not hasattr(unwrap_wrapped_function(computed_field_info.wrapped_property), '__deprecated__')
+ ):
+ desc = _DeprecatedFieldDescriptor(msg, computed_field_info.wrapped_property)
+ desc.__set_name__(cls, field)
+ setattr(cls, field, desc)
+
+
+class _DeprecatedFieldDescriptor:
+ """Read-only data descriptor used to emit a runtime deprecation warning before accessing a deprecated field.
+
+ Attributes:
+ msg: The deprecation message to be emitted.
+ wrapped_property: The property instance if the deprecated field is a computed field, or `None`.
+ field_name: The name of the field being deprecated.
+ """
+
+ field_name: str
+
+ def __init__(self, msg: str, wrapped_property: property | None = None) -> None:
+ self.msg = msg
+ self.wrapped_property = wrapped_property
+
+ def __set_name__(self, cls: type[BaseModel], name: str) -> None:
+ self.field_name = name
+
+ def __get__(self, obj: BaseModel | None, obj_type: type[BaseModel] | None = None) -> Any:
+ if obj is None:
+ if self.wrapped_property is not None:
+ return self.wrapped_property.__get__(None, obj_type)
+ raise AttributeError(self.field_name)
+
+ warnings.warn(self.msg, builtins.DeprecationWarning, stacklevel=2)
+
+ if self.wrapped_property is not None:
+ return self.wrapped_property.__get__(obj, obj_type)
+ return obj.__dict__[self.field_name]
+
+ # Defined to make it a data descriptor and take precedence over the instance's dictionary.
+ # Note that it will not be called when setting a value on a model instance
+ # as `BaseModel.__setattr__` is defined and takes priority.
+ def __set__(self, obj: Any, value: Any) -> NoReturn:
+ raise AttributeError(self.field_name)
+
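+# Illustrative sketch (hypothetical usage): with `Field(deprecated=...)`, the
+# descriptor above emits a DeprecationWarning on instance attribute access:
+#
+#     import warnings
+#     from pydantic import BaseModel, Field
+#
+#     class M(BaseModel):
+#         x: int = Field(default=0, deprecated='use `y` instead')
+#
+#     with warnings.catch_warnings(record=True) as caught:
+#         warnings.simplefilter('always')
+#         _ = M().x
+#     assert 'use `y` instead' in str(caught[0].message)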
+
+class _PydanticWeakRef:
+ """Wrapper for `weakref.ref` that enables `pickle` serialization.
+
+ Cloudpickle fails to serialize `weakref.ref` objects due to an arcane error related
+ to abstract base classes (`abc.ABC`). This class works around the issue by wrapping
+ `weakref.ref` instead of subclassing it.
+
+ See https://github.com/pydantic/pydantic/issues/6763 for context.
+
+ Semantics:
+ - If not pickled, behaves the same as a `weakref.ref`.
+ - If pickled along with the referenced object, the same `weakref.ref` behavior
+ will be maintained between them after unpickling.
+ - If pickled without the referenced object, after unpickling the underlying
+ reference will be cleared (`__call__` will always return `None`).
+ """
+
+ def __init__(self, obj: Any):
+ if obj is None:
+ # The object will be `None` upon deserialization if the serialized weakref
+ # had lost its underlying object.
+ self._wr = None
+ else:
+ self._wr = weakref.ref(obj)
+
+ def __call__(self) -> Any:
+ if self._wr is None:
+ return None
+ else:
+ return self._wr()
+
+ def __reduce__(self) -> tuple[Callable, tuple[weakref.ReferenceType | None]]:
+ return _PydanticWeakRef, (self(),)
+
+
+def build_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None:
+ """Takes an input dictionary, and produces a new value that (invertibly) replaces the values with weakrefs.
+
+ We can't just use a WeakValueDictionary because many types (including int, str, etc.) can't be stored as values
+ in a WeakValueDictionary.
+
+ The `unpack_lenient_weakvaluedict` function can be used to reverse this operation.
+ """
+ if d is None:
+ return None
+ result = {}
+ for k, v in d.items():
+ try:
+ proxy = _PydanticWeakRef(v)
+ except TypeError:
+ proxy = v
+ result[k] = proxy
+ return result
+
+
+def unpack_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None:
+ """Inverts the transform performed by `build_lenient_weakvaluedict`."""
+ if d is None:
+ return None
+
+ result = {}
+ for k, v in d.items():
+ if isinstance(v, _PydanticWeakRef):
+ v = v()
+ if v is not None:
+ result[k] = v
+ else:
+ result[k] = v
+ return result
+
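+# Illustrative sketch (hypothetical usage): the two functions form an inverse
+# pair while the referenced objects stay alive; values that cannot be weakly
+# referenced (ints, strings, ...) pass through untouched:
+#
+#     class Thing: ...
+#
+#     thing = Thing()
+#     packed = build_lenient_weakvaluedict({'t': thing, 'n': 1})
+#     assert isinstance(packed['t'], _PydanticWeakRef)
+#     assert unpack_lenient_weakvaluedict(packed) == {'t': thing, 'n': 1}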
+
+@lru_cache(maxsize=None)
+def default_ignored_types() -> tuple[type[Any], ...]:
+ from ..fields import ComputedFieldInfo
+
+ ignored_types = [
+ FunctionType,
+ property,
+ classmethod,
+ staticmethod,
+ PydanticDescriptorProxy,
+ ComputedFieldInfo,
+ TypeAliasType, # from `typing_extensions`
+ ]
+
+ if sys.version_info >= (3, 12):
+ ignored_types.append(typing.TypeAliasType)
+
+ return tuple(ignored_types)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_namespace_utils.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_namespace_utils.py
new file mode 100644
index 00000000..799c4c4e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_namespace_utils.py
@@ -0,0 +1,284 @@
+from __future__ import annotations
+
+import sys
+from collections.abc import Generator
+from contextlib import contextmanager
+from functools import cached_property
+from typing import Any, Callable, Iterator, Mapping, NamedTuple, TypeVar
+
+from typing_extensions import ParamSpec, TypeAlias, TypeAliasType, TypeVarTuple
+
+GlobalsNamespace: TypeAlias = 'dict[str, Any]'
+"""A global namespace.
+
+In most cases, this is a reference to the `__dict__` attribute of a module.
+This namespace type is expected as the `globals` argument during annotations evaluation.
+"""
+
+MappingNamespace: TypeAlias = Mapping[str, Any]
+"""Any kind of namespace.
+
+In most cases, this is a local namespace (e.g. the `__dict__` attribute of a class,
+the [`f_locals`][frame.f_locals] attribute of a frame object, when dealing with types
+defined inside functions).
+This namespace type is expected as the `locals` argument during annotations evaluation.
+"""
+
+_TypeVarLike: TypeAlias = 'TypeVar | ParamSpec | TypeVarTuple'
+
+
+class NamespacesTuple(NamedTuple):
+ """A tuple of globals and locals to be used during annotations evaluation.
+
+ This data structure is defined as a named tuple so that it can easily be unpacked:
+
+ ```python {lint="skip" test="skip"}
+ def eval_type(typ: type[Any], ns: NamespacesTuple) -> Any:
+ return eval(typ, *ns)
+ ```
+ """
+
+ globals: GlobalsNamespace
+ """The namespace to be used as the `globals` argument during annotations evaluation."""
+
+ locals: MappingNamespace
+ """The namespace to be used as the `locals` argument during annotations evaluation."""
+
+
+def get_module_ns_of(obj: Any) -> dict[str, Any]:
+ """Get the namespace of the module where the object is defined.
+
+ Caution: this function does not return a copy of the module namespace, so the result
+ should not be mutated. The burden of enforcing this is on the caller.
+ """
+ module_name = getattr(obj, '__module__', None)
+ if module_name:
+ try:
+ return sys.modules[module_name].__dict__
+ except KeyError:
+ # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
+ return {}
+ return {}
+
+
+# Note that this class is almost identical to `collections.ChainMap`, but we need to enforce
+# immutable mappings here:
+class LazyLocalNamespace(Mapping[str, Any]):
+ """A lazily evaluated mapping, to be used as the `locals` argument during annotations evaluation.
+
+ While the [`eval`][eval] function expects a mapping as the `locals` argument, it only
+ performs `__getitem__` calls. The [`Mapping`][collections.abc.Mapping] abstract base class
+ is fully implemented only for type checking purposes.
+
+ Args:
+ *namespaces: The namespaces to consider, in ascending order of priority.
+
+ Example:
+ ```python {lint="skip" test="skip"}
+ ns = LazyLocalNamespace({'a': 1, 'b': 2}, {'a': 3})
+ ns['a']
+ #> 3
+ ns['b']
+ #> 2
+ ```
+ """
+
+ def __init__(self, *namespaces: MappingNamespace) -> None:
+ self._namespaces = namespaces
+
+ @cached_property
+ def data(self) -> dict[str, Any]:
+ return {k: v for ns in self._namespaces for k, v in ns.items()}
+
+ def __len__(self) -> int:
+ return len(self.data)
+
+ def __getitem__(self, key: str) -> Any:
+ return self.data[key]
+
+ def __contains__(self, key: object) -> bool:
+ return key in self.data
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self.data)
+
+
+def ns_for_function(obj: Callable[..., Any], parent_namespace: MappingNamespace | None = None) -> NamespacesTuple:
+ """Return the global and local namespaces to be used when evaluating annotations for the provided function.
+
+ The global namespace will be the `__dict__` attribute of the module the function was defined in.
+ The local namespace will contain the `__type_params__` introduced by PEP 695.
+
+ Args:
+ obj: The object to use when building namespaces.
+ parent_namespace: Optional namespace to be added with the lowest priority in the local namespace.
+ If the passed function is a method, the `parent_namespace` will be the namespace of the class
+ the method is defined in. Thus, we also fetch type `__type_params__` from there (i.e. the
+ class-scoped type variables).
+ """
+ locals_list: list[MappingNamespace] = []
+ if parent_namespace is not None:
+ locals_list.append(parent_namespace)
+
+ # Get the `__type_params__` attribute introduced by PEP 695.
+ # Note that the `typing._eval_type` function expects type params to be
+ # passed as a separate argument. However, internally, `_eval_type` calls
+ # `ForwardRef._evaluate` which will merge type params with the localns,
+ # essentially mimicking what we do here.
+ type_params: tuple[_TypeVarLike, ...]
+ if hasattr(obj, '__type_params__'):
+ type_params = obj.__type_params__
+ else:
+ type_params = ()
+ if parent_namespace is not None:
+ # We also fetch type params from the parent namespace. If present, it probably
+ # means the function was defined in a class. This is to support the following:
+ # https://github.com/python/cpython/issues/124089.
+ type_params += parent_namespace.get('__type_params__', ())
+
+ locals_list.append({t.__name__: t for t in type_params})
+
+ # What about short-circuiting to `obj.__globals__`?
+ globalns = get_module_ns_of(obj)
+
+ return NamespacesTuple(globalns, LazyLocalNamespace(*locals_list))
+
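+# Illustrative sketch (hypothetical usage, Python 3.12+ syntax): PEP 695 type
+# parameters show up in the locals of the returned namespaces tuple:
+#
+#     def f[T](x: 'T') -> 'T': ...
+#
+#     ns = ns_for_function(f)
+#     assert 'T' in ns.locals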
+
+class NsResolver:
+ """A class responsible for the namespaces resolving logic for annotations evaluation.
+
+ This class handles the namespace logic when evaluating annotations mainly for class objects.
+
+ It holds a stack of classes that are being inspected during the core schema building,
+ and the `types_namespace` property exposes the globals and locals to be used for
+ type annotation evaluation. Additionally -- if no class is present in the stack -- a
+ fallback globals and locals can be provided using the `namespaces_tuple` argument
+ (this is useful when generating a schema for a simple annotation, e.g. when using
+ `TypeAdapter`).
+
+ The namespace creation logic is unfortunately flawed in some cases, for backwards
+ compatibility reasons and to better support valid edge cases. See the description
+ for the `parent_namespace` argument and the example for more details.
+
+ Args:
+ namespaces_tuple: The default globals and locals to use if no class is present
+ on the stack. This can be useful when using the `GenerateSchema` class
+ with `TypeAdapter`, where the "type" being analyzed is a simple annotation.
+ parent_namespace: An optional parent namespace that will be added to the locals
+ with the lowest priority. For a given class defined in a function, the locals
+ of this function are usually used as the parent namespace:
+
+ ```python {lint="skip" test="skip"}
+ from pydantic import BaseModel
+
+ def func() -> None:
+ SomeType = int
+
+ class Model(BaseModel):
+ f: 'SomeType'
+
+ # when collecting fields, a namespace resolver instance will be created
+ # this way:
+ # ns_resolver = NsResolver(parent_namespace={'SomeType': SomeType})
+ ```
+
+ For backwards compatibility reasons and to support valid edge cases, this parent
+ namespace will be used for *every* type being pushed to the stack. In the future,
+ we might want to be smarter by only doing so when the type being pushed is defined
+ in the same module as the parent namespace.
+
+ Example:
+ ```python {lint="skip" test="skip"}
+ ns_resolver = NsResolver(
+ parent_namespace={'fallback': 1},
+ )
+
+ class Sub:
+ m: 'Model'
+
+ class Model:
+ some_local = 1
+ sub: Sub
+
+ ns_resolver = NsResolver()
+
+ # This is roughly what happens when we build a core schema for `Model`:
+ with ns_resolver.push(Model):
+ ns_resolver.types_namespace
+ #> NamespacesTuple({'Sub': Sub}, {'Model': Model, 'some_local': 1})
+ # First thing to notice here, the model being pushed is added to the locals.
+ # Because `NsResolver` is being used during the model definition, it is not
+ # yet added to the globals. This is useful when resolving self-referencing annotations.
+
+ with ns_resolver.push(Sub):
+ ns_resolver.types_namespace
+ #> NamespacesTuple({'Sub': Sub}, {'Sub': Sub, 'Model': Model})
+ # Second thing to notice: `Sub` is present in both the globals and locals.
+ # This is not an issue, just that as described above, the model being pushed
+ # is added to the locals, but it happens to be present in the globals as well
+ # because it is already defined.
+ # Third thing to notice: `Model` is also added in locals. This is a backwards
+ # compatibility workaround that allows for `Sub` to be able to resolve `'Model'`
+ # correctly (as otherwise models would have to be rebuilt even though this
+ # doesn't look necessary).
+ ```
+ """
+
+ def __init__(
+ self,
+ namespaces_tuple: NamespacesTuple | None = None,
+ parent_namespace: MappingNamespace | None = None,
+ ) -> None:
+ self._base_ns_tuple = namespaces_tuple or NamespacesTuple({}, {})
+ self._parent_ns = parent_namespace
+ self._types_stack: list[type[Any] | TypeAliasType] = []
+
+ @cached_property
+ def types_namespace(self) -> NamespacesTuple:
+ """The current global and local namespaces to be used for annotations evaluation."""
+ if not self._types_stack:
+ # TODO: should we merge the parent namespace here?
+ # This is relevant for TypeAdapter, where there are no types on the stack, and we might
+ # need access to the parent_ns. Right now, we sidestep this in `type_adapter.py` by passing
+ # locals to both parent_ns and the base_ns_tuple, but this is a bit hacky.
+ # we might consider something like:
+ # if self._parent_ns is not None:
+ # # Hacky workarounds, see class docstring:
+ # # An optional parent namespace that will be added to the locals with the lowest priority
+ # locals_list: list[MappingNamespace] = [self._parent_ns, self._base_ns_tuple.locals]
+ # return NamespacesTuple(self._base_ns_tuple.globals, LazyLocalNamespace(*locals_list))
+ return self._base_ns_tuple
+
+ typ = self._types_stack[-1]
+
+ globalns = get_module_ns_of(typ)
+
+ locals_list: list[MappingNamespace] = []
+ # Hacky workarounds, see class docstring:
+ # An optional parent namespace that will be added to the locals with the lowest priority
+ if self._parent_ns is not None:
+ locals_list.append(self._parent_ns)
+ if len(self._types_stack) > 1:
+ first_type = self._types_stack[0]
+ locals_list.append({first_type.__name__: first_type})
+
+ if hasattr(typ, '__dict__'):
+ # TypeAliasType is the exception.
+ locals_list.append(vars(typ))
+
+ # The len check above prevents this from being added twice:
+ locals_list.append({typ.__name__: typ})
+
+ return NamespacesTuple(globalns, LazyLocalNamespace(*locals_list))
+
+ @contextmanager
+ def push(self, typ: type[Any] | TypeAliasType, /) -> Generator[None]:
+ """Push a type to the stack."""
+ self._types_stack.append(typ)
+ # Reset the cached property:
+ self.__dict__.pop('types_namespace', None)
+ try:
+ yield
+ finally:
+ self._types_stack.pop()
+ self.__dict__.pop('types_namespace', None)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_repr.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_repr.py
new file mode 100644
index 00000000..de81c8bd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_repr.py
@@ -0,0 +1,123 @@
+"""Tools to provide pretty/human-readable display of objects."""
+
+from __future__ import annotations as _annotations
+
+import types
+import typing
+from typing import Any
+
+import typing_extensions
+
+from . import _typing_extra
+
+if typing.TYPE_CHECKING:
+ ReprArgs: typing_extensions.TypeAlias = 'typing.Iterable[tuple[str | None, Any]]'
+ RichReprResult: typing_extensions.TypeAlias = (
+ 'typing.Iterable[Any | tuple[Any] | tuple[str, Any] | tuple[str, Any, Any]]'
+ )
+
+
+class PlainRepr(str):
+ """String class where repr doesn't include quotes. Useful with Representation when you want to return a string
+ representation of something that is valid (or pseudo-valid) python.
+ """
+
+ def __repr__(self) -> str:
+ return str(self)
+
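+# Illustrative sketch (hypothetical usage): `repr()` of a PlainRepr drops the
+# quotes, which is useful when embedding type names in a generated repr:
+#
+#     assert repr(PlainRepr('list[int]')) == 'list[int]'
+#     assert repr('list[int]') == "'list[int]'"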
+
+class Representation:
+ # Mixin to provide `__str__`, `__repr__`, and `__pretty__` and `__rich_repr__` methods.
+ # `__pretty__` is used by [devtools](https://python-devtools.helpmanual.io/).
+ # `__rich_repr__` is used by [rich](https://rich.readthedocs.io/en/stable/pretty.html).
+ # (this is not a docstring to avoid adding a docstring to classes which inherit from Representation)
+
+ # we don't want to use a type annotation here as it can break get_type_hints
+ __slots__ = () # type: typing.Collection[str]
+
+ def __repr_args__(self) -> ReprArgs:
+ """Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden.
+
+ Can either return:
+ * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`
+ * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`
+ """
+ attrs_names = self.__slots__
+ if not attrs_names and hasattr(self, '__dict__'):
+ attrs_names = self.__dict__.keys()
+ attrs = ((s, getattr(self, s)) for s in attrs_names)
+ return [(a, v if v is not self else self.__repr_recursion__(v)) for a, v in attrs if v is not None]
+
+ def __repr_name__(self) -> str:
+ """Name of the instance's class, used in __repr__."""
+ return self.__class__.__name__
+
+ def __repr_recursion__(self, object: Any) -> str:
+ """Returns the string representation of a recursive object."""
+ # This is copied over from the stdlib `pprint` module:
+ return f'<Recursion on {type(object).__name__} with id={id(object)}>'
+
+ def __repr_str__(self, join_str: str) -> str:
+ return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())
+
+ def __pretty__(self, fmt: typing.Callable[[Any], Any], **kwargs: Any) -> typing.Generator[Any, None, None]:
+ """Used by devtools (https://python-devtools.helpmanual.io/) to pretty print objects."""
+ yield self.__repr_name__() + '('
+ yield 1
+ for name, value in self.__repr_args__():
+ if name is not None:
+ yield name + '='
+ yield fmt(value)
+ yield ','
+ yield 0
+ yield -1
+ yield ')'
+
+ def __rich_repr__(self) -> RichReprResult:
+ """Used by Rich (https://rich.readthedocs.io/en/stable/pretty.html) to pretty print objects."""
+ for name, field_repr in self.__repr_args__():
+ if name is None:
+ yield field_repr
+ else:
+ yield name, field_repr
+
+ def __str__(self) -> str:
+ return self.__repr_str__(' ')
+
+ def __repr__(self) -> str:
+ return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
+
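+# Illustrative sketch (hypothetical usage): a minimal subclass gets `__repr__`
+# and `__str__` for free from the default `__repr_args__`:
+#
+#     class Point(Representation):
+#         def __init__(self, x: int, y: int) -> None:
+#             self.x, self.y = x, y
+#
+#     assert repr(Point(1, 2)) == 'Point(x=1, y=2)'
+#     assert str(Point(1, 2)) == 'x=1 y=2'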
+
+def display_as_type(obj: Any) -> str:
+ """Pretty representation of a type, should be as close as possible to the original type definition string.
+
+ Takes some logic from `typing._type_repr`.
+ """
+ if isinstance(obj, (types.FunctionType, types.BuiltinFunctionType)):
+ return obj.__name__
+ elif obj is ...:
+ return '...'
+ elif isinstance(obj, Representation):
+ return repr(obj)
+ elif isinstance(obj, typing.ForwardRef) or _typing_extra.is_type_alias_type(obj):
+ return str(obj)
+
+ if not isinstance(obj, (_typing_extra.typing_base, _typing_extra.WithArgsTypes, type)):
+ obj = obj.__class__
+
+ if _typing_extra.origin_is_union(typing_extensions.get_origin(obj)):
+ args = ', '.join(map(display_as_type, typing_extensions.get_args(obj)))
+ return f'Union[{args}]'
+ elif isinstance(obj, _typing_extra.WithArgsTypes):
+ if _typing_extra.is_literal(obj):
+ args = ', '.join(map(repr, typing_extensions.get_args(obj)))
+ else:
+ args = ', '.join(map(display_as_type, typing_extensions.get_args(obj)))
+ try:
+ return f'{obj.__qualname__}[{args}]'
+ except AttributeError:
+ return str(obj).replace('typing.', '').replace('typing_extensions.', '') # handles TypeAliasType in 3.12
+ elif isinstance(obj, type):
+ return obj.__qualname__
+ else:
+ return repr(obj).replace('typing.', '').replace('typing_extensions.', '')
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_schema_generation_shared.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_schema_generation_shared.py
new file mode 100644
index 00000000..a6e3391d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_schema_generation_shared.py
@@ -0,0 +1,126 @@
+"""Types and utility functions used by various other internal tools."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Callable
+
+from pydantic_core import core_schema
+from typing_extensions import Literal
+
+from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
+
+if TYPE_CHECKING:
+ from ..json_schema import GenerateJsonSchema, JsonSchemaValue
+ from ._core_utils import CoreSchemaOrField
+ from ._generate_schema import GenerateSchema
+ from ._namespace_utils import NamespacesTuple
+
+ GetJsonSchemaFunction = Callable[[CoreSchemaOrField, GetJsonSchemaHandler], JsonSchemaValue]
+ HandlerOverride = Callable[[CoreSchemaOrField], JsonSchemaValue]
+
+
+class GenerateJsonSchemaHandler(GetJsonSchemaHandler):
+ """JsonSchemaHandler implementation that doesn't do ref unwrapping by default.
+
+ This is used for any Annotated metadata so that we don't end up with conflicting
+ modifications to the definition schema.
+
+ Used internally by Pydantic; please do not rely on this implementation.
+ See `GetJsonSchemaHandler` for the handler API.
+ """
+
+ def __init__(self, generate_json_schema: GenerateJsonSchema, handler_override: HandlerOverride | None) -> None:
+ self.generate_json_schema = generate_json_schema
+ self.handler = handler_override or generate_json_schema.generate_inner
+ self.mode = generate_json_schema.mode
+
+ def __call__(self, core_schema: CoreSchemaOrField, /) -> JsonSchemaValue:
+ return self.handler(core_schema)
+
+ def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue) -> JsonSchemaValue:
+ """Resolves `$ref` in the json schema.
+
+ This returns the input json schema if there is no `$ref` in json schema.
+
+ Args:
+ maybe_ref_json_schema: The input json schema that may contain `$ref`.
+
+ Returns:
+ Resolved json schema.
+
+ Raises:
+ LookupError: If it can't find the definition for `$ref`.
+ """
+ if '$ref' not in maybe_ref_json_schema:
+ return maybe_ref_json_schema
+ ref = maybe_ref_json_schema['$ref']
+ json_schema = self.generate_json_schema.get_schema_from_definitions(ref)
+ if json_schema is None:
+ raise LookupError(
+ f'Could not find a ref for {ref}.'
+ ' Maybe you tried to call resolve_ref_schema from within a recursive model?'
+ )
+ return json_schema
+
+
+class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler):
+ """Wrapper to use an arbitrary function as a `GetCoreSchemaHandler`.
+
+ Used internally by Pydantic; please do not rely on this implementation.
+ See `GetCoreSchemaHandler` for the handler API.
+ """
+
+ def __init__(
+ self,
+ handler: Callable[[Any], core_schema.CoreSchema],
+ generate_schema: GenerateSchema,
+ ref_mode: Literal['to-def', 'unpack'] = 'to-def',
+ ) -> None:
+ self._handler = handler
+ self._generate_schema = generate_schema
+ self._ref_mode = ref_mode
+
+ def __call__(self, source_type: Any, /) -> core_schema.CoreSchema:
+ schema = self._handler(source_type)
+ ref = schema.get('ref')
+ if self._ref_mode == 'to-def':
+ if ref is not None:
+ self._generate_schema.defs.definitions[ref] = schema
+ return core_schema.definition_reference_schema(ref)
+ return schema
+ else: # ref_mode = 'unpack'
+ return self.resolve_ref_schema(schema)
+
+ def _get_types_namespace(self) -> NamespacesTuple:
+ return self._generate_schema._types_namespace
+
+ def generate_schema(self, source_type: Any, /) -> core_schema.CoreSchema:
+ return self._generate_schema.generate_schema(source_type)
+
+ @property
+ def field_name(self) -> str | None:
+ return self._generate_schema.field_name_stack.get()
+
+ def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
+ """Resolves reference in the core schema.
+
+ Args:
+ maybe_ref_schema: The input core schema that may contain a reference.
+
+ Returns:
+ Resolved core schema.
+
+ Raises:
+ LookupError: If it can't find the definition for the reference.
+ """
+ if maybe_ref_schema['type'] == 'definition-ref':
+ ref = maybe_ref_schema['schema_ref']
+ if ref not in self._generate_schema.defs.definitions:
+ raise LookupError(
+ f'Could not find a ref for {ref}.'
+ ' Maybe you tried to call resolve_ref_schema from within a recursive model?'
+ )
+ return self._generate_schema.defs.definitions[ref]
+ elif maybe_ref_schema['type'] == 'definitions':
+ return self.resolve_ref_schema(maybe_ref_schema['schema'])
+ return maybe_ref_schema
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_serializers.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_serializers.py
new file mode 100644
index 00000000..3e459cf1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_serializers.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+import collections
+import collections.abc
+import typing
+from typing import Any
+
+from pydantic_core import PydanticOmit, core_schema
+
+SEQUENCE_ORIGIN_MAP: dict[Any, Any] = {
+ typing.Deque: collections.deque,
+ collections.deque: collections.deque,
+ list: list,
+ typing.List: list,
+ set: set,
+ typing.AbstractSet: set,
+ typing.Set: set,
+ frozenset: frozenset,
+ typing.FrozenSet: frozenset,
+ typing.Sequence: list,
+ typing.MutableSequence: list,
+ typing.MutableSet: set,
+ # this doesn't handle subclasses of these
+ # parametrized typing.Set creates one of these
+ collections.abc.MutableSet: set,
+ collections.abc.Set: frozenset,
+}
+
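+# Illustrative sketch (hypothetical usage): the map collapses typing aliases
+# onto the concrete constructor used to rebuild the sequence, e.g.:
+#
+#     assert SEQUENCE_ORIGIN_MAP[typing.Sequence] is list
+#     assert SEQUENCE_ORIGIN_MAP[collections.abc.Set] is frozenset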
+
+def serialize_sequence_via_list(
+ v: Any, handler: core_schema.SerializerFunctionWrapHandler, info: core_schema.SerializationInfo
+) -> Any:
+ items: list[Any] = []
+
+ mapped_origin = SEQUENCE_ORIGIN_MAP.get(type(v), None)
+ if mapped_origin is None:
+ # we shouldn't hit this branch, should probably add a serialization error or something
+ return v
+
+ for index, item in enumerate(v):
+ try:
+ # use a dedicated name so the input sequence `v` isn't shadowed
+ ser_item = handler(item, index)
+ except PydanticOmit:
+ pass
+ else:
+ items.append(ser_item)
+
+ if info.mode_is_json():
+ return items
+ else:
+ return mapped_origin(items)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_signature.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_signature.py
new file mode 100644
index 00000000..2273577c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_signature.py
@@ -0,0 +1,188 @@
+from __future__ import annotations
+
+import dataclasses
+from inspect import Parameter, Signature, signature
+from typing import TYPE_CHECKING, Any, Callable
+
+from pydantic_core import PydanticUndefined
+
+from ._utils import is_valid_identifier
+
+if TYPE_CHECKING:
+ from ..config import ExtraValues
+ from ..fields import FieldInfo
+
+
+# Copied over from stdlib dataclasses
+class _HAS_DEFAULT_FACTORY_CLASS:
+ def __repr__(self):
+ return '<factory>'
+
+
+_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS()
+
+
+def _field_name_for_signature(field_name: str, field_info: FieldInfo) -> str:
+ """Extract the correct name to use for the field when generating a signature.
+
+ Priority is given to the alias, then the validation_alias (each only when it is a string and a
+ valid identifier), then the field name.
+
+ Args:
+ field_name: The name of the field
+ field_info: The corresponding FieldInfo object.
+
+ Returns:
+ The correct name to use when generating a signature.
+ """
+ if isinstance(field_info.alias, str) and is_valid_identifier(field_info.alias):
+ return field_info.alias
+ if isinstance(field_info.validation_alias, str) and is_valid_identifier(field_info.validation_alias):
+ return field_info.validation_alias
+
+ return field_name
+
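+# Illustrative sketch (hypothetical usage; `FieldInfo` is normally built via
+# `Field(...)` rather than constructed directly): the alias wins only when it
+# is a valid identifier:
+#
+#     from pydantic.fields import FieldInfo
+#
+#     assert _field_name_for_signature('n', FieldInfo(alias='count')) == 'count'
+#     assert _field_name_for_signature('n', FieldInfo(alias='not-valid')) == 'n'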
+
+def _process_param_defaults(param: Parameter) -> Parameter:
+ """Modify the signature for a parameter in a dataclass where the default value is a FieldInfo instance.
+
+ Args:
+ param (Parameter): The parameter
+
+ Returns:
+ Parameter: The custom processed parameter
+ """
+ from ..fields import FieldInfo
+
+ param_default = param.default
+ if isinstance(param_default, FieldInfo):
+ annotation = param.annotation
+ # Replace the annotation if appropriate
+ # inspect does "clever" things to show annotations as strings because we have
+ # `from __future__ import annotations` in main; we don't want that
+ if annotation == 'Any':
+ annotation = Any
+
+ # Replace the field default
+ default = param_default.default
+ if default is PydanticUndefined:
+ if param_default.default_factory is PydanticUndefined:
+ default = Signature.empty
+ else:
+ # this is used by dataclasses to indicate a factory exists:
+ default = dataclasses._HAS_DEFAULT_FACTORY # type: ignore
+ return param.replace(
+ annotation=annotation, name=_field_name_for_signature(param.name, param_default), default=default
+ )
+ return param
+
+
+def _generate_signature_parameters( # noqa: C901 (ignore complexity, could use a refactor)
+ init: Callable[..., None],
+ fields: dict[str, FieldInfo],
+ populate_by_name: bool,
+ extra: ExtraValues | None,
+) -> dict[str, Parameter]:
+ """Generate a mapping of parameter names to Parameter objects for a pydantic BaseModel or dataclass."""
+ from itertools import islice
+
+ present_params = signature(init).parameters.values()
+ merged_params: dict[str, Parameter] = {}
+ var_kw = None
+ use_var_kw = False
+
+ for param in islice(present_params, 1, None): # skip self arg
+ # inspect does "clever" things to show annotations as strings because we have
+ # `from __future__ import annotations` in main; we don't want that
+ if fields.get(param.name):
+ # exclude params with init=False
+ if getattr(fields[param.name], 'init', True) is False:
+ continue
+ param = param.replace(name=_field_name_for_signature(param.name, fields[param.name]))
+ if param.annotation == 'Any':
+ param = param.replace(annotation=Any)
+ if param.kind is param.VAR_KEYWORD:
+ var_kw = param
+ continue
+ merged_params[param.name] = param
+
+ if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through
+ allow_names = populate_by_name
+ for field_name, field in fields.items():
+ # when alias is a str it should be used for signature generation
+ param_name = _field_name_for_signature(field_name, field)
+
+ if field_name in merged_params or param_name in merged_params:
+ continue
+
+ if not is_valid_identifier(param_name):
+ if allow_names:
+ param_name = field_name
+ else:
+ use_var_kw = True
+ continue
+
+ if field.is_required():
+ default = Parameter.empty
+ elif field.default_factory is not None:
+ # Mimics stdlib dataclasses:
+ default = _HAS_DEFAULT_FACTORY
+ else:
+ default = field.default
+ merged_params[param_name] = Parameter(
+ param_name,
+ Parameter.KEYWORD_ONLY,
+ annotation=field.rebuild_annotation(),
+ default=default,
+ )
+
+ if extra == 'allow':
+ use_var_kw = True
+
+ if var_kw and use_var_kw:
+ # Make sure the parameter for extra kwargs
+ # does not have the same name as a field
+ default_model_signature = [
+ ('self', Parameter.POSITIONAL_ONLY),
+ ('data', Parameter.VAR_KEYWORD),
+ ]
+ if [(p.name, p.kind) for p in present_params] == default_model_signature:
+ # if this is the standard model signature, use extra_data as the extra args name
+ var_kw_name = 'extra_data'
+ else:
+ # else start from var_kw
+ var_kw_name = var_kw.name
+
+ # generate a name that's definitely unique
+ while var_kw_name in fields:
+ var_kw_name += '_'
+ merged_params[var_kw_name] = var_kw.replace(name=var_kw_name)
+
+ return merged_params
+
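+# Illustrative sketch (hypothetical usage): the net effect on a plain model is
+# a keyword-only signature built from the fields, with aliases taking over:
+#
+#     import inspect
+#     from pydantic import BaseModel, Field
+#
+#     class User(BaseModel):
+#         id: int
+#         name: str = Field(default='x', alias='full_name')
+#
+#     print(inspect.signature(User))
+#     #> (*, id: int, full_name: str = 'x') -> None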
+
+def generate_pydantic_signature(
+ init: Callable[..., None],
+ fields: dict[str, FieldInfo],
+ populate_by_name: bool,
+ extra: ExtraValues | None,
+ is_dataclass: bool = False,
+) -> Signature:
+ """Generate signature for a pydantic BaseModel or dataclass.
+
+ Args:
+ init: The class init.
+ fields: The model fields.
+ populate_by_name: The `populate_by_name` value of the config.
+ extra: The `extra` value of the config.
+ is_dataclass: Whether the model is a dataclass.
+
+ Returns:
+ The dataclass/BaseModel subclass signature.
+ """
+ merged_params = _generate_signature_parameters(init, fields, populate_by_name, extra)
+
+ if is_dataclass:
+ merged_params = {k: _process_param_defaults(v) for k, v in merged_params.items()}
+
+ return Signature(parameters=list(merged_params.values()), return_annotation=None)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_std_types_schema.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_std_types_schema.py
new file mode 100644
index 00000000..84a7a4a4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_std_types_schema.py
@@ -0,0 +1,404 @@
+"""Logic for generating pydantic-core schemas for standard library types.
+
+Import of this module is deferred since it contains imports of many standard library modules.
+"""
+
+# TODO: eventually, we'd like to move all of the types handled here to have pydantic-core validators
+# so that we can avoid this annotation injection and just use the standard pydantic-core schema generation
+
+from __future__ import annotations as _annotations
+
+import collections
+import collections.abc
+import dataclasses
+import os
+import typing
+from functools import partial
+from typing import Any, Callable, Iterable, Tuple, TypeVar, cast
+
+import typing_extensions
+from pydantic_core import (
+ CoreSchema,
+ PydanticCustomError,
+ core_schema,
+)
+from typing_extensions import get_args, get_origin
+
+from pydantic._internal._serializers import serialize_sequence_via_list
+from pydantic.errors import PydanticSchemaGenerationError
+from pydantic.types import Strict
+
+from ..json_schema import JsonSchemaValue
+from . import _known_annotated_metadata, _typing_extra
+from ._import_utils import import_cached_field_info
+from ._internal_dataclass import slots_true
+from ._schema_generation_shared import GetCoreSchemaHandler, GetJsonSchemaHandler
+
+FieldInfo = import_cached_field_info()
+
+if typing.TYPE_CHECKING:
+ from ._generate_schema import GenerateSchema
+
+ StdSchemaFunction = Callable[[GenerateSchema, type[Any]], core_schema.CoreSchema]
+
+
+@dataclasses.dataclass(**slots_true)
+class InnerSchemaValidator:
+ """Use a fixed CoreSchema, avoiding interference from outward annotations."""
+
+ core_schema: CoreSchema
+ js_schema: JsonSchemaValue | None = None
+ js_core_schema: CoreSchema | None = None
+ js_schema_update: JsonSchemaValue | None = None
+
+ def __get_pydantic_json_schema__(self, _schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
+ if self.js_schema is not None:
+ return self.js_schema
+ js_schema = handler(self.js_core_schema or self.core_schema)
+ if self.js_schema_update is not None:
+ js_schema.update(self.js_schema_update)
+ return js_schema
+
+ def __get_pydantic_core_schema__(self, _source_type: Any, _handler: GetCoreSchemaHandler) -> CoreSchema:
+ return self.core_schema
+
+
+def path_schema_prepare_pydantic_annotations(
+ source_type: Any, annotations: Iterable[Any]
+) -> tuple[Any, list[Any]] | None:
+ import pathlib
+
+ orig_source_type: Any = get_origin(source_type) or source_type
+ if (
+ (source_type_args := get_args(source_type))
+ and orig_source_type is os.PathLike
+ and source_type_args[0] not in {str, bytes, Any}
+ ):
+ return None
+
+ if orig_source_type not in {
+ os.PathLike,
+ pathlib.Path,
+ pathlib.PurePath,
+ pathlib.PosixPath,
+ pathlib.PurePosixPath,
+ pathlib.PureWindowsPath,
+ }:
+ return None
+
+ metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
+ _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.STR_CONSTRAINTS, orig_source_type)
+
+ is_first_arg_byte = source_type_args and source_type_args[0] is bytes
+ construct_path = pathlib.PurePath if orig_source_type is os.PathLike else orig_source_type
+ constrained_schema = (
+ core_schema.bytes_schema(**metadata) if is_first_arg_byte else core_schema.str_schema(**metadata)
+ )
+
+ def path_validator(input_value: str | bytes) -> os.PathLike[Any]: # type: ignore
+ try:
+ if is_first_arg_byte:
+ if isinstance(input_value, bytes):
+ try:
+ input_value = input_value.decode()
+ except UnicodeDecodeError as e:
+ raise PydanticCustomError('bytes_type', 'Input must be valid bytes') from e
+ else:
+ raise PydanticCustomError('bytes_type', 'Input must be bytes')
+ elif not isinstance(input_value, str):
+ raise PydanticCustomError('path_type', 'Input is not a valid path')
+
+ return construct_path(input_value)
+ except TypeError as e:
+ raise PydanticCustomError('path_type', 'Input is not a valid path') from e
+
+ instance_schema = core_schema.json_or_python_schema(
+ json_schema=core_schema.no_info_after_validator_function(path_validator, constrained_schema),
+ python_schema=core_schema.is_instance_schema(orig_source_type),
+ )
+
+ strict: bool | None = None
+ for annotation in annotations:
+ if isinstance(annotation, Strict):
+ strict = annotation.strict
+
+ schema = core_schema.lax_or_strict_schema(
+ lax_schema=core_schema.union_schema(
+ [
+ instance_schema,
+ core_schema.no_info_after_validator_function(path_validator, constrained_schema),
+ ],
+ custom_error_type='path_type',
+ custom_error_message=f'Input is not a valid path for {orig_source_type}',
+ strict=True,
+ ),
+ strict_schema=instance_schema,
+ serialization=core_schema.to_string_ser_schema(),
+ strict=strict,
+ )
+
+ return (
+ orig_source_type,
+ [
+ InnerSchemaValidator(schema, js_core_schema=constrained_schema, js_schema_update={'format': 'path'}),
+ *remaining_annotations,
+ ],
+ )
+
+
+def deque_validator(
+ input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, maxlen: None | int
+) -> collections.deque[Any]:
+ if isinstance(input_value, collections.deque):
+ maxlens = [v for v in (input_value.maxlen, maxlen) if v is not None]
+ if maxlens:
+ maxlen = min(maxlens)
+ return collections.deque(handler(input_value), maxlen=maxlen)
+ else:
+ return collections.deque(handler(input_value), maxlen=maxlen)
+
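+# Illustrative sketch (hypothetical usage, passing a plain callable in place of
+# the wrap handler): when both the input deque and the annotation carry a
+# maxlen, the smaller one wins:
+#
+#     d = collections.deque([1, 2, 3], maxlen=10)
+#     out = deque_validator(d, lambda v: list(v), 2)
+#     assert out.maxlen == 2 and list(out) == [2, 3]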
+
+@dataclasses.dataclass(**slots_true)
+class DequeValidator:
+ item_source_type: type[Any]
+ metadata: dict[str, Any]
+
+ def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
+ if _typing_extra.is_any(self.item_source_type):
+ items_schema = None
+ else:
+ items_schema = handler.generate_schema(self.item_source_type)
+
+ # if we have a MaxLen annotation, we might as well set that as the default maxlen on the deque
+ # this lets us reuse existing metadata annotations to let users set the maxlen on a deque
+ # that e.g. comes from JSON
+ coerce_instance_wrap = partial(
+ core_schema.no_info_wrap_validator_function,
+ partial(deque_validator, maxlen=self.metadata.get('max_length', None)),
+ )
+
+ # we have to use a lax list schema here, because we need to validate the deque's
+ # items via a list schema, but it's ok if the deque itself is not a list
+ metadata_with_strict_override = {**self.metadata, 'strict': False}
+ constrained_schema = core_schema.list_schema(items_schema, **metadata_with_strict_override)
+
+ check_instance = core_schema.json_or_python_schema(
+ json_schema=core_schema.list_schema(),
+ python_schema=core_schema.is_instance_schema(collections.deque),
+ )
+
+ serialization = core_schema.wrap_serializer_function_ser_schema(
+ serialize_sequence_via_list, schema=items_schema or core_schema.any_schema(), info_arg=True
+ )
+
+ strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])
+
+ if self.metadata.get('strict', False):
+ schema = strict
+ else:
+ lax = coerce_instance_wrap(constrained_schema)
+ schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
+ schema['serialization'] = serialization
+
+ return schema
+
+
+def deque_schema_prepare_pydantic_annotations(
+ source_type: Any, annotations: Iterable[Any]
+) -> tuple[Any, list[Any]] | None:
+ args = get_args(source_type)
+
+ if not args:
+ args = typing.cast(Tuple[Any], (Any,))
+ elif len(args) != 1:
+ raise ValueError('Expected deque to have exactly 1 generic parameter')
+
+ item_source_type = args[0]
+
+ metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
+ _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)
+
+ return (source_type, [DequeValidator(item_source_type, metadata), *remaining_annotations])
+
+
+MAPPING_ORIGIN_MAP: dict[Any, Any] = {
+ typing.DefaultDict: collections.defaultdict,
+ collections.defaultdict: collections.defaultdict,
+ collections.OrderedDict: collections.OrderedDict,
+ typing_extensions.OrderedDict: collections.OrderedDict,
+ dict: dict,
+ typing.Dict: dict,
+ collections.Counter: collections.Counter,
+ typing.Counter: collections.Counter,
+ # this doesn't handle subclasses of these
+ typing.Mapping: dict,
+ typing.MutableMapping: dict,
+ # parametrized typing.{Mutable}Mapping creates one of these
+ collections.abc.MutableMapping: dict,
+ collections.abc.Mapping: dict,
+}
+
+
+def defaultdict_validator(
+ input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, default_default_factory: Callable[[], Any]
+) -> collections.defaultdict[Any, Any]:
+ if isinstance(input_value, collections.defaultdict):
+ default_factory = input_value.default_factory
+ return collections.defaultdict(default_factory, handler(input_value))
+ else:
+ return collections.defaultdict(default_default_factory, handler(input_value))
+
+
+def get_defaultdict_default_default_factory(values_source_type: Any) -> Callable[[], Any]:
+ def infer_default() -> Callable[[], Any]:
+ allowed_default_types: dict[Any, Any] = {
+ typing.Tuple: tuple,
+ tuple: tuple,
+ collections.abc.Sequence: tuple,
+ collections.abc.MutableSequence: list,
+ typing.List: list,
+ list: list,
+ typing.Sequence: list,
+ typing.Set: set,
+ set: set,
+ typing.MutableSet: set,
+ collections.abc.MutableSet: set,
+ collections.abc.Set: frozenset,
+ typing.MutableMapping: dict,
+ typing.Mapping: dict,
+ collections.abc.Mapping: dict,
+ collections.abc.MutableMapping: dict,
+ float: float,
+ int: int,
+ str: str,
+ bool: bool,
+ }
+ values_type_origin = get_origin(values_source_type) or values_source_type
+ instructions = 'set using `DefaultDict[..., Annotated[..., Field(default_factory=...)]]`'
+ if isinstance(values_type_origin, TypeVar):
+
+ def type_var_default_factory() -> None:
+ raise RuntimeError(
+ 'Generic defaultdict cannot be used without a concrete value type or an'
+ ' explicit default factory, ' + instructions
+ )
+
+ return type_var_default_factory
+ elif values_type_origin not in allowed_default_types:
+ # a somewhat subjective set of types that have reasonable default values
+ allowed_msg = ', '.join([t.__name__ for t in set(allowed_default_types.values())])
+ raise PydanticSchemaGenerationError(
+ f'Unable to infer a default factory for values of type {values_source_type}.'
+ f' Only {allowed_msg} are supported, other types require an explicit default factory'
+ ' ' + instructions
+ )
+ return allowed_default_types[values_type_origin]
+
+ # Assume Annotated[..., Field(...)]
+ if _typing_extra.is_annotated(values_source_type):
+ field_info = next((v for v in get_args(values_source_type) if isinstance(v, FieldInfo)), None)
+ else:
+ field_info = None
+ if field_info and field_info.default_factory:
+ # Assume the default factory does not take any argument:
+ default_default_factory = cast(Callable[[], Any], field_info.default_factory)
+ else:
+ default_default_factory = infer_default()
+ return default_default_factory
+
+
+@dataclasses.dataclass(**slots_true)
+class MappingValidator:
+ mapped_origin: type[Any]
+ keys_source_type: type[Any]
+ values_source_type: type[Any]
+ min_length: int | None = None
+ max_length: int | None = None
+ strict: bool = False
+
+ def serialize_mapping_via_dict(self, v: Any, handler: core_schema.SerializerFunctionWrapHandler) -> Any:
+ return handler(v)
+
+ def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
+ if _typing_extra.is_any(self.keys_source_type):
+ keys_schema = None
+ else:
+ keys_schema = handler.generate_schema(self.keys_source_type)
+ if _typing_extra.is_any(self.values_source_type):
+ values_schema = None
+ else:
+ values_schema = handler.generate_schema(self.values_source_type)
+
+ metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict}
+
+ if self.mapped_origin is dict:
+ schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
+ else:
+ constrained_schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
+ check_instance = core_schema.json_or_python_schema(
+ json_schema=core_schema.dict_schema(),
+ python_schema=core_schema.is_instance_schema(self.mapped_origin),
+ )
+
+ if self.mapped_origin is collections.defaultdict:
+ default_default_factory = get_defaultdict_default_default_factory(self.values_source_type)
+ coerce_instance_wrap = partial(
+ core_schema.no_info_wrap_validator_function,
+ partial(defaultdict_validator, default_default_factory=default_default_factory),
+ )
+ else:
+ coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)
+
+ serialization = core_schema.wrap_serializer_function_ser_schema(
+ self.serialize_mapping_via_dict,
+ schema=core_schema.dict_schema(
+ keys_schema or core_schema.any_schema(), values_schema or core_schema.any_schema()
+ ),
+ info_arg=False,
+ )
+
+ strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])
+
+ if metadata.get('strict', False):
+ schema = strict
+ else:
+ lax = coerce_instance_wrap(constrained_schema)
+ schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
+ schema['serialization'] = serialization
+
+ return schema
+
+
+def mapping_like_prepare_pydantic_annotations(
+ source_type: Any, annotations: Iterable[Any]
+) -> tuple[Any, list[Any]] | None:
+ origin: Any = get_origin(source_type)
+
+ mapped_origin = MAPPING_ORIGIN_MAP.get(origin, None) if origin else MAPPING_ORIGIN_MAP.get(source_type, None)
+ if mapped_origin is None:
+ return None
+
+ args = get_args(source_type)
+
+ if not args:
+ args = typing.cast(Tuple[Any, Any], (Any, Any))
+ elif mapped_origin is collections.Counter:
+ # a single generic
+ if len(args) != 1:
+ raise ValueError('Expected Counter to have exactly 1 generic parameter')
+ args = (args[0], int) # Counter values are always int
+ elif len(args) != 2:
+ raise ValueError('Expected mapping to have exactly 2 generic parameters')
+
+ keys_source_type, values_source_type = args
+
+ metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
+ _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)
+
+ return (
+ source_type,
+ [
+ MappingValidator(mapped_origin, keys_source_type, values_source_type, **metadata),
+ *remaining_annotations,
+ ],
+ )
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_typing_extra.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_typing_extra.py
new file mode 100644
index 00000000..399c8c46
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_typing_extra.py
@@ -0,0 +1,893 @@
+"""Logic for interacting with type annotations, mostly extensions, shims and hacks to wrap Python's typing module."""
+
+from __future__ import annotations
+
+import collections.abc
+import re
+import sys
+import types
+import typing
+import warnings
+from functools import lru_cache, partial
+from typing import TYPE_CHECKING, Any, Callable
+
+import typing_extensions
+from typing_extensions import TypeIs, deprecated, get_args, get_origin
+
+from ._namespace_utils import GlobalsNamespace, MappingNamespace, NsResolver, get_module_ns_of
+
+if sys.version_info < (3, 10):
+ NoneType = type(None)
+ EllipsisType = type(Ellipsis)
+else:
+ from types import EllipsisType as EllipsisType
+ from types import NoneType as NoneType
+
+if TYPE_CHECKING:
+ from pydantic import BaseModel
+
+# See https://typing-extensions.readthedocs.io/en/latest/#runtime-use-of-types:
+
+
+@lru_cache(maxsize=None)
+def _get_typing_objects_by_name_of(name: str) -> tuple[Any, ...]:
+ """Get the member named `name` from both `typing` and `typing-extensions` (if it exists)."""
+ result = tuple(getattr(module, name) for module in (typing, typing_extensions) if hasattr(module, name))
+ if not result:
+ raise ValueError(f'Neither `typing` nor `typing_extensions` has an object called {name!r}')
+ return result
+
+
+# As suggested by the `typing-extensions` documentation, we could apply caching to this method,
+# but it doesn't seem to improve performance. This also requires `obj` to be hashable, which
+# might not be always the case:
+def _is_typing_name(obj: object, name: str) -> bool:
+ """Return whether `obj` is the member of the typing modules (includes the `typing-extensions` one) named `name`."""
+ # Using `any()` is slower:
+ for thing in _get_typing_objects_by_name_of(name):
+ if obj is thing:
+ return True
+ return False
+
+
+def is_any(tp: Any, /) -> bool:
+ """Return whether the provided argument is the `Any` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_any(Any)
+ #> True
+ ```
+ """
+ return _is_typing_name(tp, name='Any')
+
+
+def is_union(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `Union` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_union(Union[int, str])
+ #> True
+ is_union(int | str)
+ #> False
+ ```
+ """
+ return _is_typing_name(get_origin(tp), name='Union')
+
+
+def is_literal(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `Literal` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_literal(Literal[42])
+ #> True
+ ```
+ """
+ return _is_typing_name(get_origin(tp), name='Literal')
+
+
+# TODO remove and replace with `get_args` when we drop support for Python 3.8
+# (see https://docs.python.org/3/whatsnew/3.9.html#id4).
+def literal_values(tp: Any, /) -> list[Any]:
+ """Return the values contained in the provided `Literal` special form."""
+ if not is_literal(tp):
+ return [tp]
+
+ values = get_args(tp)
+ return [x for value in values for x in literal_values(value)]
+
+
+def is_annotated(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `Annotated` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_annotated(Annotated[int, ...])
+ #> True
+ ```
+ """
+ return _is_typing_name(get_origin(tp), name='Annotated')
+
+
+def annotated_type(tp: Any, /) -> Any | None:
+ """Return the type of the `Annotated` special form, or `None`."""
+ return get_args(tp)[0] if is_annotated(tp) else None
+
+
+def is_unpack(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `Unpack` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_unpack(Unpack[Ts])
+ #> True
+ ```
+ """
+ return _is_typing_name(get_origin(tp), name='Unpack')
+
+
+def unpack_type(tp: Any, /) -> Any | None:
+ """Return the type wrapped by the `Unpack` special form, or `None`."""
+ return get_args(tp)[0] if is_unpack(tp) else None
+
+
+def is_self(tp: Any, /) -> bool:
+ """Return whether the provided argument is the `Self` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_self(Self)
+ #> True
+ ```
+ """
+ return _is_typing_name(tp, name='Self')
+
+
+def is_new_type(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `NewType`.
+
+ ```python {test="skip" lint="skip"}
+ is_new_type(NewType('MyInt', int))
+ #> True
+ ```
+ """
+ if sys.version_info < (3, 10):
+ # On Python < 3.10, `typing.NewType` is a function
+ return hasattr(tp, '__supertype__')
+ else:
+ return _is_typing_name(type(tp), name='NewType')
+
+
+def is_hashable(tp: Any, /) -> bool:
+ """Return whether the provided argument is the `Hashable` class.
+
+ ```python {test="skip" lint="skip"}
+ is_hashable(Hashable)
+ #> True
+ ```
+ """
+ # `get_origin` is documented as normalizing any typing-module aliases to `collections` classes,
+ # hence the second check:
+ return tp is collections.abc.Hashable or get_origin(tp) is collections.abc.Hashable
+
+
+def is_callable(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `Callable`, parametrized or not.
+
+ ```python {test="skip" lint="skip"}
+ is_callable(Callable[[int], str])
+ #> True
+ is_callable(typing.Callable)
+ #> True
+ is_callable(collections.abc.Callable)
+ #> True
+ ```
+ """
+ # `get_origin` is documented as normalizing any typing-module aliases to `collections` classes,
+ # hence the second check:
+ return tp is collections.abc.Callable or get_origin(tp) is collections.abc.Callable
+
+
+_PARAMSPEC_TYPES: tuple[type[typing_extensions.ParamSpec], ...] = (typing_extensions.ParamSpec,)
+if sys.version_info >= (3, 10):
+ _PARAMSPEC_TYPES = (*_PARAMSPEC_TYPES, typing.ParamSpec) # pyright: ignore[reportAssignmentType]
+
+
+def is_paramspec(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `ParamSpec`.
+
+ ```python {test="skip" lint="skip"}
+ P = ParamSpec('P')
+ is_paramspec(P)
+ #> True
+ ```
+ """
+ return isinstance(tp, _PARAMSPEC_TYPES)
+
+
+_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,)
+if sys.version_info >= (3, 12):
+ _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType)
+
+
+def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]:
+ """Return whether the provided argument is an instance of `TypeAliasType`.
+
+ ```python {test="skip" lint="skip"}
+ type Int = int
+ is_type_alias_type(Int)
+ #> True
+ Str = TypeAliasType('Str', str)
+ is_type_alias_type(Str)
+ #> True
+ ```
+ """
+ return isinstance(tp, _TYPE_ALIAS_TYPES)
+
+
+def is_classvar(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `ClassVar` special form, parametrized or not.
+
+ Note that in most cases, you will want to use the `is_classvar_annotation` function,
+ which is used to check if an annotation (in the context of a Pydantic model or dataclass)
+ should be treated as being a class variable.
+
+ ```python {test="skip" lint="skip"}
+ is_classvar(ClassVar[int])
+ #> True
+ is_classvar(ClassVar)
+ #> True
+ """
+ # ClassVar is not necessarily parametrized:
+ return _is_typing_name(tp, name='ClassVar') or _is_typing_name(get_origin(tp), name='ClassVar')
+
+
+_classvar_re = re.compile(r'((\w+\.)?Annotated\[)?(\w+\.)?ClassVar\[')
+
+
+def is_classvar_annotation(tp: Any, /) -> bool:
+ """Return whether the provided argument represents a class variable annotation.
+
+ Although not explicitly stated by the typing specification, `ClassVar` can be used
+ inside `Annotated` and as such, this function checks for this specific scenario.
+
+ Because this function is used to detect class variables before evaluating forward references
+ (or when their evaluation failed), we also fall back to a naive regex match. This is
+ required because class variables are inspected before fields are collected, so we try to be
+ as accurate as possible.
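+
+ A few illustrative checks (the string form relies on the regex fallback):
+
+ ```python {test="skip" lint="skip"}
+ is_classvar_annotation(ClassVar[int])
+ #> True
+ is_classvar_annotation(Annotated[ClassVar[int], ...])
+ #> True
+ is_classvar_annotation('ClassVar[int]')
+ #> True
+ ```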
+ """
+ if is_classvar(tp) or (anntp := annotated_type(tp)) is not None and is_classvar(anntp):
+ return True
+
+ str_ann: str | None = None
+ if isinstance(tp, typing.ForwardRef):
+ str_ann = tp.__forward_arg__
+ if isinstance(tp, str):
+ str_ann = tp
+
+ if str_ann is not None and _classvar_re.match(str_ann):
+ # stdlib dataclasses do something similar, although a bit more advanced
+ # (see `dataclass._is_type`).
+ return True
+
+ return False
+
+
+# TODO implement `is_finalvar_annotation` as Final can be wrapped with other special forms:
+def is_finalvar(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `Final` special form, parametrized or not.
+
+ ```python {test="skip" lint="skip"}
+ is_finalvar(Final[int])
+ #> True
+ is_finalvar(Final)
+ #> True
+ """
+ # Final is not necessarily parametrized:
+ return _is_typing_name(tp, name='Final') or _is_typing_name(get_origin(tp), name='Final')
+
+
+def is_required(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `Required` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_required(Required[int])
+ #> True
+ """
+ return _is_typing_name(get_origin(tp), name='Required')
+
+
+def is_not_required(tp: Any, /) -> bool:
+ """Return whether the provided argument is a `NotRequired` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_not_required(NotRequired[int])
+ #> True
+ ```
+ """
+ return _is_typing_name(get_origin(tp), name='NotRequired')
+
+
+def is_no_return(tp: Any, /) -> bool:
+ """Return whether the provided argument is the `NoReturn` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_no_return(NoReturn)
+ #> True
+ ```
+ """
+ return _is_typing_name(tp, name='NoReturn')
+
+
+def is_never(tp: Any, /) -> bool:
+ """Return whether the provided argument is the `Never` special form.
+
+ ```python {test="skip" lint="skip"}
+ is_never(Never)
+ #> True
+ ```
+ """
+ return _is_typing_name(tp, name='Never')
+
+
+_DEPRECATED_TYPES: tuple[type[typing_extensions.deprecated], ...] = (typing_extensions.deprecated,)
+if hasattr(warnings, 'deprecated'):
+ _DEPRECATED_TYPES = (*_DEPRECATED_TYPES, warnings.deprecated) # pyright: ignore[reportAttributeAccessIssue]
+
+
+def is_deprecated_instance(obj: Any, /) -> TypeIs[deprecated]:
+ """Return whether the argument is an instance of the `warnings.deprecated` class or the `typing_extensions` backport."""
+ return isinstance(obj, _DEPRECATED_TYPES)
+
+
+_NONE_TYPES: tuple[Any, ...] = (None, NoneType, typing.Literal[None], typing_extensions.Literal[None])
+
+
+def is_none_type(tp: Any, /) -> bool:
+ """Return whether the argument represents the `None` type as part of an annotation.
+
+ ```python {test="skip" lint="skip"}
+ is_none_type(None)
+ #> True
+ is_none_type(NoneType)
+ #> True
+ is_none_type(Literal[None])
+ #> True
+ is_none_type(type[None])
+ #> False
+ """
+ return tp in _NONE_TYPES
+
+
+def is_namedtuple(tp: Any, /) -> bool:
+ """Return whether the provided argument is a named tuple class.
+
+ The class can be created using `typing.NamedTuple` or `collections.namedtuple`.
+ Parametrized generic classes are *not* assumed to be named tuples.
+ """
+ from ._utils import lenient_issubclass # circ. import
+
+ return lenient_issubclass(tp, tuple) and hasattr(tp, '_fields')
+
+
+if sys.version_info < (3, 9):
+
+ def is_zoneinfo_type(tp: Any, /) -> bool:
+ """Return whether the provided argument is the `zoneinfo.ZoneInfo` type."""
+ return False
+
+else:
+ from zoneinfo import ZoneInfo
+
+ def is_zoneinfo_type(tp: Any, /) -> TypeIs[type[ZoneInfo]]:
+ """Return whether the provided argument is the `zoneinfo.ZoneInfo` type."""
+ return tp is ZoneInfo
+
+
+if sys.version_info < (3, 10):
+
+ def origin_is_union(tp: Any, /) -> bool:
+ """Return whether the provided argument is the `Union` special form."""
+ return _is_typing_name(tp, name='Union')
+
+ def is_generic_alias(type_: type[Any]) -> bool:
+ return isinstance(type_, typing._GenericAlias) # pyright: ignore[reportAttributeAccessIssue]
+
+else:
+
+ def origin_is_union(tp: Any, /) -> bool:
+ """Return whether the provided argument is the `Union` special form or the `UnionType`."""
+ return _is_typing_name(tp, name='Union') or tp is types.UnionType
+
+ def is_generic_alias(tp: Any, /) -> bool:
+ return isinstance(tp, (types.GenericAlias, typing._GenericAlias)) # pyright: ignore[reportAttributeAccessIssue]
+
+
+# TODO: Ideally, we should avoid relying on the private `typing` constructs:
+
+if sys.version_info < (3, 9):
+ WithArgsTypes: tuple[Any, ...] = (typing._GenericAlias,) # pyright: ignore[reportAttributeAccessIssue]
+elif sys.version_info < (3, 10):
+ WithArgsTypes: tuple[Any, ...] = (typing._GenericAlias, types.GenericAlias) # pyright: ignore[reportAttributeAccessIssue]
+else:
+ WithArgsTypes: tuple[Any, ...] = (typing._GenericAlias, types.GenericAlias, types.UnionType) # pyright: ignore[reportAttributeAccessIssue]
+
+
+# Similarly, we shouldn't rely on this `_Final` class, which is even more private than `_GenericAlias`:
+typing_base: Any = typing._Final # pyright: ignore[reportAttributeAccessIssue]
+
+
+### Annotation evaluations functions:
+
+
+def parent_frame_namespace(*, parent_depth: int = 2, force: bool = False) -> dict[str, Any] | None:
+ """We allow use of items in parent namespace to get around the issue with `get_type_hints` only looking in the
+ global module namespace. See https://github.com/pydantic/pydantic/issues/2678#issuecomment-1008139014 -> Scope
+ and suggestion at the end of the next comment by @gvanrossum.
+
+ WARNING 1: it matters exactly where this is called. By default, this function will build a namespace from the
+ parent of where it is called.
+
+ WARNING 2: this only looks in the parent namespace, not other parents since (AFAIK) there's no way to collect a
+ dict of exactly what's in scope. Using `f_back` would work sometimes but would be very wrong and confusing in many
+ other cases. See https://discuss.python.org/t/is-there-a-way-to-access-parent-nested-namespaces/20659.
+
+ There are some cases where we want to force fetching the parent namespace, ex: during a `model_rebuild` call.
+ In this case, we want both the namespace of the class' module, if applicable, and the parent namespace of the
+ module where the rebuild is called.
+
+ In other cases, like during initial schema build, if a class is defined at the top module level, we don't need to
+ fetch that module's namespace, because the class' __module__ attribute can be used to access the parent namespace.
+ This is done in `_namespace_utils.get_module_ns_of`. Thus, there's no need to cache the parent frame namespace in this case.
+ """
+ frame = sys._getframe(parent_depth)
+
+ # note, we don't copy frame.f_locals here (or during the last return call), because we don't expect the namespace to be modified down the line
+ # if this becomes a problem, we could implement some sort of frozen mapping structure to enforce this
+ if force:
+ return frame.f_locals
+
+ # if either of the following conditions are true, the class is defined at the top module level
+ # to better understand why we need both of these checks, see
+ # https://github.com/pydantic/pydantic/pull/10113#discussion_r1714981531
+ if frame.f_back is None or frame.f_code.co_name == '<module>':
+ return None
+
+ return frame.f_locals
+
+
+def _type_convert(arg: Any) -> Any:
+ """Convert `None` to `NoneType` and strings to `ForwardRef` instances.
+
+ This is a backport of the private `typing._type_convert` function. When
+ evaluating a type, `ForwardRef._evaluate` ends up being called, and is
+ responsible for making this conversion. However, we still have to apply
+ it for the first argument passed to our type evaluation functions, similarly
+ to the `typing.get_type_hints` function.
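+
+ A small illustrative sketch:
+
+ ```python {test="skip" lint="skip"}
+ _type_convert(None)
+ #> <class 'NoneType'>
+ _type_convert('MyType')
+ #> ForwardRef('MyType')
+ ```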
+ """
+ if arg is None:
+ return NoneType
+ if isinstance(arg, str):
+ # Like `typing.get_type_hints`, assume the arg can be in any context,
+ # hence the proper `is_argument` and `is_class` args:
+ return _make_forward_ref(arg, is_argument=False, is_class=True)
+ return arg
+
+
+def get_model_type_hints(
+ obj: type[BaseModel],
+ *,
+ ns_resolver: NsResolver | None = None,
+) -> dict[str, tuple[Any, bool]]:
+ """Collect annotations from a Pydantic model class, including those from parent classes.
+
+ Args:
+ obj: The Pydantic model to inspect.
+ ns_resolver: A namespace resolver instance to use. Defaults to an empty instance.
+
+ Returns:
+ A dictionary mapping annotation names to a two-tuple: the first element is the evaluated
+ type or the original annotation if a `NameError` occurred, the second element is a boolean
+ indicating whether the evaluation succeeded.
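+
+ A minimal sketch (entries inherited from `BaseModel` itself are omitted from the shown output):
+
+ ```python {test="skip" lint="skip"}
+ class Model(BaseModel):
+ x: int
+ y: 'str'
+
+ get_model_type_hints(Model)
+ #> {'x': (<class 'int'>, True), 'y': (<class 'str'>, True)}
+ ```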
+ """
+ hints: dict[str, Any] | dict[str, tuple[Any, bool]] = {}
+ ns_resolver = ns_resolver or NsResolver()
+
+ for base in reversed(obj.__mro__):
+ ann: dict[str, Any] | None = base.__dict__.get('__annotations__')
+ if not ann or isinstance(ann, types.GetSetDescriptorType):
+ continue
+ with ns_resolver.push(base):
+ globalns, localns = ns_resolver.types_namespace
+ for name, value in ann.items():
+ if name.startswith('_'):
+ # For private attributes, we only need the annotation to detect the `ClassVar` special form.
+ # For this reason, we still try to evaluate it, but we also catch any possible exception (on
+ # top of the `NameError`s caught in `try_eval_type`) that could happen so that users are free
+ # to use any kind of forward annotation for private fields (e.g. circular imports, new typing
+ # syntax, etc).
+ try:
+ hints[name] = try_eval_type(value, globalns, localns)
+ except Exception:
+ hints[name] = (value, False)
+ else:
+ hints[name] = try_eval_type(value, globalns, localns)
+ return hints
+
+
+def get_cls_type_hints(
+ obj: type[Any],
+ *,
+ ns_resolver: NsResolver | None = None,
+) -> dict[str, Any]:
+ """Collect annotations from a class, including those from parent classes.
+
+ Args:
+ obj: The class to inspect.
+ ns_resolver: A namespace resolver instance to use. Defaults to an empty instance.
+ """
+ hints: dict[str, Any] | dict[str, tuple[Any, bool]] = {}
+ ns_resolver = ns_resolver or NsResolver()
+
+ for base in reversed(obj.__mro__):
+ ann: dict[str, Any] | None = base.__dict__.get('__annotations__')
+ if not ann or isinstance(ann, types.GetSetDescriptorType):
+ continue
+ with ns_resolver.push(base):
+ globalns, localns = ns_resolver.types_namespace
+ for name, value in ann.items():
+ hints[name] = eval_type(value, globalns, localns)
+ return hints
+
+
+def try_eval_type(
+ value: Any,
+ globalns: GlobalsNamespace | None = None,
+ localns: MappingNamespace | None = None,
+) -> tuple[Any, bool]:
+ """Try evaluating the annotation using the provided namespaces.
+
+ Args:
+ value: The value to evaluate. If `None`, it will be replaced by `type[None]`. If an instance
+ of `str`, it will be converted to a `ForwardRef`.
+ globalns: The global namespace to use during annotation evaluation.
+ localns: The local namespace to use during annotation evaluation.
+
+ Returns:
+ A two-tuple containing the possibly evaluated type and a boolean indicating
+ whether the evaluation succeeded or not.
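+
+ A small illustrative sketch:
+
+ ```python {test="skip" lint="skip"}
+ try_eval_type('int')
+ #> (<class 'int'>, True)
+ try_eval_type('Missing')
+ #> (ForwardRef('Missing'), False)
+ ```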
+ """
+ value = _type_convert(value)
+
+ try:
+ return eval_type_backport(value, globalns, localns), True
+ except NameError:
+ return value, False
+
+
+def eval_type(
+ value: Any,
+ globalns: GlobalsNamespace | None = None,
+ localns: MappingNamespace | None = None,
+) -> Any:
+ """Evaluate the annotation using the provided namespaces.
+
+ Args:
+ value: The value to evaluate. If `None`, it will be replaced by `type[None]`. If an instance
+ of `str`, it will be converted to a `ForwardRef`.
+ globalns: The global namespace to use during annotation evaluation.
+ localns: The local namespace to use during annotation evaluation.
+ """
+ value = _type_convert(value)
+ return eval_type_backport(value, globalns, localns)
+
+
+@deprecated(
+ '`eval_type_lenient` is deprecated, use `try_eval_type` instead.',
+ category=None,
+)
+def eval_type_lenient(
+ value: Any,
+ globalns: GlobalsNamespace | None = None,
+ localns: MappingNamespace | None = None,
+) -> Any:
+ ev, _ = try_eval_type(value, globalns, localns)
+ return ev
+
+
+def eval_type_backport(
+ value: Any,
+ globalns: GlobalsNamespace | None = None,
+ localns: MappingNamespace | None = None,
+ type_params: tuple[Any, ...] | None = None,
+) -> Any:
+ """An enhanced version of `typing._eval_type` which will fall back to using the `eval_type_backport`
+ package if it's installed to let older Python versions use newer typing constructs.
+
+ Specifically, this transforms `X | Y` into `typing.Union[X, Y]` and `list[X]` into `typing.List[X]`
+ (as well as all the types made generic in PEP 585) if the original syntax is not supported in the
+ current Python version.
+
+ This function will also display a helpful error if the value passed fails to evaluate.
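+
+ For example, on Python 3.9 the following is only expected to work when the `eval_type_backport`
+ package is installed (illustrative sketch):
+
+ ```python {test="skip" lint="skip"}
+ eval_type_backport(typing.ForwardRef('int | None'))
+ #> typing.Optional[int]
+ ```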
+ """
+ try:
+ return _eval_type_backport(value, globalns, localns, type_params)
+ except TypeError as e:
+ if 'Unable to evaluate type annotation' in str(e):
+ raise
+
+ # If it is a `TypeError` and value isn't a `ForwardRef`, it would have failed during annotation definition.
+ # Thus we assert here for type checking purposes:
+ assert isinstance(value, typing.ForwardRef)
+
+ message = f'Unable to evaluate type annotation {value.__forward_arg__!r}.'
+ if sys.version_info >= (3, 11):
+ e.add_note(message)
+ raise
+ else:
+ raise TypeError(message) from e
+
+
+def _eval_type_backport(
+ value: Any,
+ globalns: GlobalsNamespace | None = None,
+ localns: MappingNamespace | None = None,
+ type_params: tuple[Any, ...] | None = None,
+) -> Any:
+ try:
+ return _eval_type(value, globalns, localns, type_params)
+ except TypeError as e:
+ if not (isinstance(value, typing.ForwardRef) and is_backport_fixable_error(e)):
+ raise
+
+ try:
+ from eval_type_backport import eval_type_backport
+ except ImportError:
+ raise TypeError(
+ f'Unable to evaluate type annotation {value.__forward_arg__!r}. If you are making use '
+ 'of the new typing syntax (unions using `|` since Python 3.10 or builtins subscripting '
+ 'since Python 3.9), you should either replace the use of new syntax with the existing '
+ '`typing` constructs or install the `eval_type_backport` package.'
+ ) from e
+
+ return eval_type_backport(
+ value,
+ globalns,
+ localns, # pyright: ignore[reportArgumentType], waiting on a new `eval_type_backport` release.
+ try_default=False,
+ )
+
+
+def _eval_type(
+ value: Any,
+ globalns: GlobalsNamespace | None = None,
+ localns: MappingNamespace | None = None,
+ type_params: tuple[Any, ...] | None = None,
+) -> Any:
+ if sys.version_info >= (3, 13):
+ return typing._eval_type( # type: ignore
+ value, globalns, localns, type_params=type_params
+ )
+ else:
+ return typing._eval_type( # type: ignore
+ value, globalns, localns
+ )
+
+
+def is_backport_fixable_error(e: TypeError) -> bool:
+ msg = str(e)
+
+ return (
+ sys.version_info < (3, 10)
+ and msg.startswith('unsupported operand type(s) for |: ')
+ ) or (
+ sys.version_info < (3, 9)
+ and "' object is not subscriptable" in msg
+ )
+
+
+def get_function_type_hints(
+ function: Callable[..., Any],
+ *,
+ include_keys: set[str] | None = None,
+ globalns: GlobalsNamespace | None = None,
+ localns: MappingNamespace | None = None,
+) -> dict[str, Any]:
+ """Return type hints for a function.
+
+ This is similar to the `typing.get_type_hints` function, with a few differences:
+ - Support `functools.partial` by using the underlying `func` attribute.
+ - If `function` happens to be a built-in type (e.g. `int`), assume it doesn't have annotations
+ but specify the `return` key as being the actual type.
+ - Do not wrap type annotation of a parameter with `Optional` if it has a default value of `None`
+ (related bug: https://github.com/python/cpython/issues/90353, only fixed in 3.11+).
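+
+ A minimal sketch of the last point (on Python < 3.11, `typing.get_type_hints` would infer `Optional[int]` for `x`):
+
+ ```python {test="skip" lint="skip"}
+ def func(x: 'int' = None) -> 'str': ...
+
+ get_function_type_hints(func)
+ #> {'x': <class 'int'>, 'return': <class 'str'>}
+ ```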
+ """
+ try:
+ if isinstance(function, partial):
+ annotations = function.func.__annotations__
+ else:
+ annotations = function.__annotations__
+ except AttributeError:
+ type_hints = get_type_hints(function)
+ if isinstance(function, type):
+ # `type[...]` is a callable, which returns an instance of itself.
+ # At some point, we might even look into the return type of `__new__`
+ # if it returns something else.
+ type_hints.setdefault('return', function)
+ return type_hints
+
+ if globalns is None:
+ globalns = get_module_ns_of(function)
+ type_params: tuple[Any, ...] | None = None
+ if localns is None:
+ # If localns was specified, it is assumed to already contain type params. This is because
+ # Pydantic has more advanced logic to do so (see `_namespace_utils.ns_for_function`).
+ type_params = getattr(function, '__type_params__', ())
+
+ type_hints = {}
+ for name, value in annotations.items():
+ if include_keys is not None and name not in include_keys:
+ continue
+ if value is None:
+ value = NoneType
+ elif isinstance(value, str):
+ value = _make_forward_ref(value)
+
+ type_hints[name] = eval_type_backport(value, globalns, localns, type_params)
+
+ return type_hints
+
+
+if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1):
+
+ def _make_forward_ref(
+ arg: Any,
+ is_argument: bool = True,
+ *,
+ is_class: bool = False,
+ ) -> typing.ForwardRef:
+ """Wrapper for ForwardRef that accounts for the `is_class` argument missing in older versions.
+ The `module` argument is omitted as it breaks <3.9.8, =3.10.0 and isn't used in the calls below.
+
+ See https://github.com/python/cpython/pull/28560 for some background.
+ The backport happened on 3.9.8, see:
+ https://github.com/pydantic/pydantic/discussions/6244#discussioncomment-6275458,
+ and on 3.10.1 for the 3.10 branch, see:
+ https://github.com/pydantic/pydantic/issues/6912
+
+ Implemented as EAFP with memory.
+ """
+ return typing.ForwardRef(arg, is_argument)
+
+else:
+ _make_forward_ref = typing.ForwardRef
+
+
+if sys.version_info >= (3, 10):
+ get_type_hints = typing.get_type_hints
+
+else:
+ """
+ For older versions of Python, we have a custom implementation of `get_type_hints` which is as close as possible to
+ the implementation in CPython 3.10.8.
+ """
+
+ @typing.no_type_check
+ def get_type_hints( # noqa: C901
+ obj: Any,
+ globalns: dict[str, Any] | None = None,
+ localns: dict[str, Any] | None = None,
+ include_extras: bool = False,
+ ) -> dict[str, Any]: # pragma: no cover
+ """Taken verbatim from python 3.10.8 unchanged, except:
+ * type annotations of the function definition above.
+ * prefixing `typing.` where appropriate
+ * Use `_make_forward_ref` instead of `typing.ForwardRef` to handle the `is_class` argument.
+
+ https://github.com/python/cpython/blob/aaaf5174241496afca7ce4d4584570190ff972fe/Lib/typing.py#L1773-L1875
+
+ DO NOT CHANGE THIS METHOD UNLESS ABSOLUTELY NECESSARY.
+ ======================================================
+
+ Return type hints for an object.
+
+ This is often the same as obj.__annotations__, but it handles
+ forward references encoded as string literals, adds Optional[t] if a
+ default value equal to None is set and recursively replaces all
+ 'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
+
+ The argument may be a module, class, method, or function. The annotations
+ are returned as a dictionary. For classes, annotations include also
+ inherited members.
+
+ TypeError is raised if the argument is not of a type that can contain
+ annotations, and an empty dictionary is returned if no annotations are
+ present.
+
+ BEWARE -- the behavior of globalns and localns is counterintuitive
+ (unless you are familiar with how eval() and exec() work). The
+ search order is locals first, then globals.
+
+ - If no dict arguments are passed, an attempt is made to use the
+ globals from obj (or the respective module's globals for classes),
+ and these are also used as the locals. If the object does not appear
+ to have globals, an empty dictionary is used. For classes, the search
+ order is globals first then locals.
+
+ - If one dict argument is passed, it is used for both globals and
+ locals.
+
+ - If two dict arguments are passed, they specify globals and
+ locals, respectively.
+ """
+ if getattr(obj, '__no_type_check__', None):
+ return {}
+ # Classes require a special treatment.
+ if isinstance(obj, type):
+ hints = {}
+ for base in reversed(obj.__mro__):
+ if globalns is None:
+ base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {})
+ else:
+ base_globals = globalns
+ ann = base.__dict__.get('__annotations__', {})
+ if isinstance(ann, types.GetSetDescriptorType):
+ ann = {}
+ base_locals = dict(vars(base)) if localns is None else localns
+ if localns is None and globalns is None:
+ # This is surprising, but required. Before Python 3.10,
+ # get_type_hints only evaluated the globalns of
+ # a class. To maintain backwards compatibility, we reverse
+ # the globalns and localns order so that eval() looks into
+ # *base_globals* first rather than *base_locals*.
+ # This only affects ForwardRefs.
+ base_globals, base_locals = base_locals, base_globals
+ for name, value in ann.items():
+ if value is None:
+ value = type(None)
+ if isinstance(value, str):
+ value = _make_forward_ref(value, is_argument=False, is_class=True)
+
+ value = eval_type_backport(value, base_globals, base_locals)
+ hints[name] = value
+ if not include_extras and hasattr(typing, '_strip_annotations'):
+ return {
+ k: typing._strip_annotations(t) # type: ignore
+ for k, t in hints.items()
+ }
+ else:
+ return hints
+
+ if globalns is None:
+ if isinstance(obj, types.ModuleType):
+ globalns = obj.__dict__
+ else:
+ nsobj = obj
+ # Find globalns for the unwrapped object.
+ while hasattr(nsobj, '__wrapped__'):
+ nsobj = nsobj.__wrapped__
+ globalns = getattr(nsobj, '__globals__', {})
+ if localns is None:
+ localns = globalns
+ elif localns is None:
+ localns = globalns
+ hints = getattr(obj, '__annotations__', None)
+ if hints is None:
+ # Return empty annotations for something that _could_ have them.
+ if isinstance(obj, typing._allowed_types): # type: ignore
+ return {}
+ else:
+ raise TypeError(f'{obj!r} is not a module, class, method, ' 'or function.')
+ defaults = typing._get_defaults(obj) # type: ignore
+ hints = dict(hints)
+ for name, value in hints.items():
+ if value is None:
+ value = type(None)
+ if isinstance(value, str):
+ # class-level forward refs were handled above, this must be either
+ # a module-level annotation or a function argument annotation
+
+ value = _make_forward_ref(
+ value,
+ is_argument=not isinstance(obj, types.ModuleType),
+ is_class=False,
+ )
+ value = eval_type_backport(value, globalns, localns)
+ if name in defaults and defaults[name] is None:
+ value = typing.Optional[value]
+ hints[name] = value
+ return hints if include_extras else {k: typing._strip_annotations(t) for k, t in hints.items()} # type: ignore
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_utils.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_utils.py
new file mode 100644
index 00000000..861271b7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_utils.py
@@ -0,0 +1,389 @@
+"""Bucket of reusable internal utilities.
+
+This should be reduced as much as possible: functions used in only one place should be moved to that place.
+"""
+
+from __future__ import annotations as _annotations
+
+import dataclasses
+import keyword
+import typing
+import weakref
+from collections import OrderedDict, defaultdict, deque
+from copy import deepcopy
+from functools import cached_property
+from inspect import Parameter
+from itertools import zip_longest
+from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType
+from typing import Any, Callable, Mapping, TypeVar
+
+from typing_extensions import TypeAlias, TypeGuard
+
+from . import _repr, _typing_extra
+from ._import_utils import import_cached_base_model
+
+if typing.TYPE_CHECKING:
+ MappingIntStrAny: TypeAlias = 'typing.Mapping[int, Any] | typing.Mapping[str, Any]'
+ AbstractSetIntStr: TypeAlias = 'typing.AbstractSet[int] | typing.AbstractSet[str]'
+ from ..main import BaseModel
+
+
+# these are types that are returned unchanged by deepcopy
+IMMUTABLE_NON_COLLECTIONS_TYPES: set[type[Any]] = {
+ int,
+ float,
+ complex,
+ str,
+ bool,
+ bytes,
+ type,
+ _typing_extra.NoneType,
+ FunctionType,
+ BuiltinFunctionType,
+ LambdaType,
+ weakref.ref,
+ CodeType,
+ # note: including ModuleType here differs from deepcopy's behaviour, which raises an error instead.
+ # That might not be a good idea in general, but considering that this function is only used internally
+ # against default values of fields, it allows a field to actually have a module as its default value
+ ModuleType,
+ NotImplemented.__class__,
+ Ellipsis.__class__,
+}
+
+# these are types that if empty, might be copied with simple copy() instead of deepcopy()
+BUILTIN_COLLECTIONS: set[type[Any]] = {
+ list,
+ set,
+ tuple,
+ frozenset,
+ dict,
+ OrderedDict,
+ defaultdict,
+ deque,
+}
+
+
+def can_be_positional(param: Parameter) -> bool:
+ """Return whether the parameter accepts a positional argument.
+
+ ```python {test="skip" lint="skip"}
+ def func(a, /, b, *, c):
+ pass
+
+ params = inspect.signature(func).parameters
+ can_be_positional(params['a'])
+ #> True
+ can_be_positional(params['b'])
+ #> True
+ can_be_positional(params['c'])
+ #> False
+ ```
+ """
+ return param.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
+
+
+def sequence_like(v: Any) -> bool:
+ return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))
+
+
+def lenient_isinstance(o: Any, class_or_tuple: type[Any] | tuple[type[Any], ...] | None) -> bool: # pragma: no cover
+ try:
+ return isinstance(o, class_or_tuple) # type: ignore[arg-type]
+ except TypeError:
+ return False
+
+
+def lenient_issubclass(cls: Any, class_or_tuple: Any) -> bool: # pragma: no cover
+ try:
+ return isinstance(cls, type) and issubclass(cls, class_or_tuple)
+ except TypeError:
+ if isinstance(cls, _typing_extra.WithArgsTypes):
+ return False
+ raise # pragma: no cover
+
+
+def is_model_class(cls: Any) -> TypeGuard[type[BaseModel]]:
+ """Returns true if cls is a _proper_ subclass of BaseModel, and provides proper type-checking,
+ unlike raw calls to lenient_issubclass.
+ """
+ BaseModel = import_cached_base_model()
+
+ return lenient_issubclass(cls, BaseModel) and cls is not BaseModel
+
+
+def is_valid_identifier(identifier: str) -> bool:
+ """Checks that a string is a valid identifier and not a Python keyword.
+ :param identifier: The identifier to test.
+ :return: True if the identifier is valid.
+ """
+ return identifier.isidentifier() and not keyword.iskeyword(identifier)
+
+
+KeyType = TypeVar('KeyType')
+
+
+def deep_update(mapping: dict[KeyType, Any], *updating_mappings: dict[KeyType, Any]) -> dict[KeyType, Any]:
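+ """Recursively merge `updating_mappings` into a copy of `mapping`: nested dicts are merged, other values are overwritten.
+
+ A small illustrative sketch:
+
+ ```python {test="skip" lint="skip"}
+ deep_update({'a': {'x': 1}, 'b': 1}, {'a': {'y': 2}, 'b': 2})
+ #> {'a': {'x': 1, 'y': 2}, 'b': 2}
+ ```
+ """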
+ updated_mapping = mapping.copy()
+ for updating_mapping in updating_mappings:
+ for k, v in updating_mapping.items():
+ if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):
+ updated_mapping[k] = deep_update(updated_mapping[k], v)
+ else:
+ updated_mapping[k] = v
+ return updated_mapping
+
+
+def update_not_none(mapping: dict[Any, Any], **update: Any) -> None:
+ mapping.update({k: v for k, v in update.items() if v is not None})
+
+
+T = TypeVar('T')
+
+
+def unique_list(
+ input_list: list[T] | tuple[T, ...],
+ *,
+ name_factory: typing.Callable[[T], str] = str,
+) -> list[T]:
+ """Make a list unique while maintaining order.
+ We update the list if another one with the same name is set
+ (e.g. model validator overridden in subclass).
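+
+ A small illustrative sketch:
+
+ ```python {test="skip" lint="skip"}
+ unique_list(['a', 'b', 'a'])
+ #> ['a', 'b']
+ ```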
+ """
+ result: list[T] = []
+ result_names: list[str] = []
+ for v in input_list:
+ v_name = name_factory(v)
+ if v_name not in result_names:
+ result_names.append(v_name)
+ result.append(v)
+ else:
+ result[result_names.index(v_name)] = v
+
+ return result
+
+
+class ValueItems(_repr.Representation):
+ """Class for more convenient calculation of excluded or included fields on values."""
+
+ __slots__ = ('_items', '_type')
+
+ def __init__(self, value: Any, items: AbstractSetIntStr | MappingIntStrAny) -> None:
+ items = self._coerce_items(items)
+
+ if isinstance(value, (list, tuple)):
+ items = self._normalize_indexes(items, len(value)) # type: ignore
+
+ self._items: MappingIntStrAny = items # type: ignore
+
+ def is_excluded(self, item: Any) -> bool:
+ """Check if item is fully excluded.
+
+ :param item: key or index of a value
+ """
+ return self.is_true(self._items.get(item))
+
+ def is_included(self, item: Any) -> bool:
+ """Check if value is contained in self._items.
+
+ :param item: key or index of value
+ """
+ return item in self._items
+
+ def for_element(self, e: int | str) -> AbstractSetIntStr | MappingIntStrAny | None:
+ """:param e: key or index of element on value
+ :return: raw values for element if self._items is dict and contain needed element
+ """
+ item = self._items.get(e) # type: ignore
+ return item if not self.is_true(item) else None
+
+ def _normalize_indexes(self, items: MappingIntStrAny, v_length: int) -> dict[int | str, Any]:
+ """:param items: dict or set of indexes which will be normalized
+ :param v_length: length of the sequence whose indexes will be normalized
+
+ >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4)
+ {0: True, 2: True, 3: True}
+ >>> self._normalize_indexes({'__all__': True}, 4)
+ {0: True, 1: True, 2: True, 3: True}
+ """
+ normalized_items: dict[int | str, Any] = {}
+ all_items = None
+ for i, v in items.items():
+ if not (isinstance(v, typing.Mapping) or isinstance(v, typing.AbstractSet) or self.is_true(v)):
+ raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}')
+ if i == '__all__':
+ all_items = self._coerce_value(v)
+ continue
+ if not isinstance(i, int):
+ raise TypeError(
+ 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '
+ 'expected integer keys or keyword "__all__"'
+ )
+ normalized_i = v_length + i if i < 0 else i
+ normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i))
+
+ if not all_items:
+ return normalized_items
+ if self.is_true(all_items):
+ for i in range(v_length):
+ normalized_items.setdefault(i, ...)
+ return normalized_items
+ for i in range(v_length):
+ normalized_item = normalized_items.setdefault(i, {})
+ if not self.is_true(normalized_item):
+ normalized_items[i] = self.merge(all_items, normalized_item)
+ return normalized_items
+
+ @classmethod
+ def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any:
+ """Merge a `base` item with an `override` item.
+
+ Both `base` and `override` are converted to dictionaries if possible.
+ Sets are converted to dictionaries with the sets entries as keys and
+ Ellipsis as values.
+
+ Each key-value pair existing in `base` is merged with `override`,
+ while the rest of the key-value pairs are updated recursively with this function.
+
+ Merging takes place based on the "union" of keys if `intersect` is
+ set to `False` (default) and on the intersection of keys if
+ `intersect` is set to `True`.
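+
+ A small illustrative sketch:
+
+ ```python {test="skip" lint="skip"}
+ ValueItems.merge({'a': ...}, {'b': {'c': ...}})
+ #> {'a': Ellipsis, 'b': {'c': Ellipsis}}
+ ValueItems.merge({'a': ..., 'b': ...}, {'b': ...}, intersect=True)
+ #> {'b': Ellipsis}
+ ```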
+ """
+ override = cls._coerce_value(override)
+ base = cls._coerce_value(base)
+ if override is None:
+ return base
+ if cls.is_true(base) or base is None:
+ return override
+ if cls.is_true(override):
+ return base if intersect else override
+
+ # intersection or union of keys while preserving ordering:
+ if intersect:
+ merge_keys = [k for k in base if k in override] + [k for k in override if k in base]
+ else:
+ merge_keys = list(base) + [k for k in override if k not in base]
+
+ merged: dict[int | str, Any] = {}
+ for k in merge_keys:
+ merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect)
+ if merged_item is not None:
+ merged[k] = merged_item
+
+ return merged
+
+ @staticmethod
+ def _coerce_items(items: AbstractSetIntStr | MappingIntStrAny) -> MappingIntStrAny:
+ if isinstance(items, typing.Mapping):
+ pass
+ elif isinstance(items, typing.AbstractSet):
+ items = dict.fromkeys(items, ...) # type: ignore
+ else:
+ class_name = getattr(items, '__class__', '???')
+ raise TypeError(f'Unexpected type of exclude value {class_name}')
+ return items # type: ignore
+
+ @classmethod
+ def _coerce_value(cls, value: Any) -> Any:
+ if value is None or cls.is_true(value):
+ return value
+ return cls._coerce_items(value)
+
+ @staticmethod
+ def is_true(v: Any) -> bool:
+ return v is True or v is ...
+
+ def __repr_args__(self) -> _repr.ReprArgs:
+ return [(None, self._items)]
+
+
+if typing.TYPE_CHECKING:
+
+ def LazyClassAttribute(name: str, get_value: Callable[[], T]) -> T: ...
+
+else:
+
+ class LazyClassAttribute:
+ """A descriptor exposing an attribute only accessible on a class (hidden from instances).
+
+ The attribute is lazily computed and cached during the first access.
+ """
+
+ def __init__(self, name: str, get_value: Callable[[], Any]) -> None:
+ self.name = name
+ self.get_value = get_value
+
+ @cached_property
+ def value(self) -> Any:
+ return self.get_value()
+
+ def __get__(self, instance: Any, owner: type[Any]) -> Any:
+ if instance is None:
+ return self.value
+ raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')
+
+
+Obj = TypeVar('Obj')
+
+
+def smart_deepcopy(obj: Obj) -> Obj:
+ """Return type as is for immutable built-in types
+ Use obj.copy() for built-in empty collections
+ Use copy.deepcopy() for non-empty collections and unknown objects.
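+
+ A small illustrative sketch:
+
+ ```python {test="skip" lint="skip"}
+ smart_deepcopy(42) is 42  # immutable: returned as-is
+ #> True
+ x = []
+ smart_deepcopy(x) is x  # empty builtin collection: shallow-copied
+ #> False
+ ```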
+ """
+ obj_type = obj.__class__
+ if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:
+ return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway
+ try:
+ if not obj and obj_type in BUILTIN_COLLECTIONS:
+ # faster way for empty collections, no need to copy its members
+ return obj if obj_type is tuple else obj.copy() # tuple doesn't have copy method # type: ignore
+ except (TypeError, ValueError, RuntimeError):
+ # do we really dare to catch ALL errors? Seems a bit risky
+ pass
+
+ return deepcopy(obj) # slowest way when we actually might need a deepcopy
+
+
+_SENTINEL = object()
+
+
+def all_identical(left: typing.Iterable[Any], right: typing.Iterable[Any]) -> bool:
+ """Check that the items of `left` are the same objects as those in `right`.
+
+ >>> a, b = object(), object()
+ >>> all_identical([a, b, a], [a, b, a])
+ True
+ >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical"
+ False
+ """
+ for left_item, right_item in zip_longest(left, right, fillvalue=_SENTINEL):
+ if left_item is not right_item:
+ return False
+ return True
+
+
+@dataclasses.dataclass(frozen=True)
+class SafeGetItemProxy:
+ """Wrapper redirecting `__getitem__` to `get` with a sentinel value as default
+
+ This makes it safe to use in `operator.itemgetter` when some keys may be missing.
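+
+ A small illustrative sketch (the sentinel's repr address is abbreviated here):
+
+ ```python {test="skip" lint="skip"}
+ import operator
+
+ getter = operator.itemgetter('a', 'missing')
+ getter(SafeGetItemProxy({'a': 1}))
+ #> (1, <object object at 0x...>)
+ ```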
+ """
+
+ # Define __slots__ manually for performance
+ # @dataclasses.dataclass() only supports slots=True in python>=3.10
+ __slots__ = ('wrapped',)
+
+ wrapped: Mapping[str, Any]
+
+ def __getitem__(self, key: str, /) -> Any:
+ return self.wrapped.get(key, _SENTINEL)
+
+ # required to pass the object to operator.itemgetter() instances due to a quirk of typeshed
+ # https://github.com/python/mypy/issues/13713
+ # https://github.com/python/typeshed/pull/8785
+ # Since this is typing-only, hide it in a typing.TYPE_CHECKING block
+ if typing.TYPE_CHECKING:
+
+ def __contains__(self, key: str, /) -> bool:
+ return self.wrapped.__contains__(key)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_validate_call.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_validate_call.py
new file mode 100644
index 00000000..f04da826
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_validate_call.py
@@ -0,0 +1,115 @@
+from __future__ import annotations as _annotations
+
+import functools
+import inspect
+from functools import partial
+from typing import Any, Awaitable, Callable
+
+import pydantic_core
+
+from ..config import ConfigDict
+from ..plugin._schema_validator import create_schema_validator
+from ._config import ConfigWrapper
+from ._generate_schema import GenerateSchema, ValidateCallSupportedTypes
+from ._namespace_utils import MappingNamespace, NsResolver, ns_for_function
+
+
+def extract_function_name(func: ValidateCallSupportedTypes) -> str:
+ """Extract the name of a `ValidateCallSupportedTypes` object."""
+ return f'partial({func.func.__name__})' if isinstance(func, functools.partial) else func.__name__
+
+
+def extract_function_qualname(func: ValidateCallSupportedTypes) -> str:
+ """Extract the qualname of a `ValidateCallSupportedTypes` object."""
+ return f'partial({func.func.__qualname__})' if isinstance(func, functools.partial) else func.__qualname__
+
+
+def update_wrapper_attributes(wrapped: ValidateCallSupportedTypes, wrapper: Callable[..., Any]):
+ """Update the `wrapper` function with the attributes of the `wrapped` function. Return the updated function."""
+ if inspect.iscoroutinefunction(wrapped):
+
+ @functools.wraps(wrapped)
+ async def wrapper_function(*args, **kwargs): # type: ignore
+ return await wrapper(*args, **kwargs)
+ else:
+
+ @functools.wraps(wrapped)
+ def wrapper_function(*args, **kwargs):
+ return wrapper(*args, **kwargs)
+
+ # We need to manually update these because a `partial` object has no `__name__` or `__qualname__`.
+ wrapper_function.__name__ = extract_function_name(wrapped)
+ wrapper_function.__qualname__ = extract_function_qualname(wrapped)
+ wrapper_function.raw_function = wrapped # type: ignore
+
+ return wrapper_function
+
+
+class ValidateCallWrapper:
+ """This is a wrapper around a function that validates the arguments passed to it, and optionally the return value."""
+
+ __slots__ = ('__pydantic_validator__', '__return_pydantic_validator__')
+
+ def __init__(
+ self,
+ function: ValidateCallSupportedTypes,
+ config: ConfigDict | None,
+ validate_return: bool,
+ parent_namespace: MappingNamespace | None,
+ ) -> None:
+ if isinstance(function, partial):
+ schema_type = function.func
+ module = function.func.__module__
+ else:
+ schema_type = function
+ module = function.__module__
+ qualname = extract_function_qualname(function)
+
+ ns_resolver = NsResolver(namespaces_tuple=ns_for_function(schema_type, parent_namespace=parent_namespace))
+
+ config_wrapper = ConfigWrapper(config)
+ gen_schema = GenerateSchema(config_wrapper, ns_resolver)
+ schema = gen_schema.clean_schema(gen_schema.generate_schema(function))
+ core_config = config_wrapper.core_config(title=qualname)
+
+ self.__pydantic_validator__ = create_schema_validator(
+ schema,
+ schema_type,
+ module,
+ qualname,
+ 'validate_call',
+ core_config,
+ config_wrapper.plugin_settings,
+ )
+
+ if validate_return:
+ signature = inspect.signature(function)
+ return_type = signature.return_annotation if signature.return_annotation is not signature.empty else Any
+ gen_schema = GenerateSchema(config_wrapper, ns_resolver)
+ schema = gen_schema.clean_schema(gen_schema.generate_schema(return_type))
+ validator = create_schema_validator(
+ schema,
+ schema_type,
+ module,
+ qualname,
+ 'validate_call',
+ core_config,
+ config_wrapper.plugin_settings,
+ )
+ if inspect.iscoroutinefunction(function):
+
+ async def return_val_wrapper(aw: Awaitable[Any]) -> Any:
+ return validator.validate_python(await aw)
+
+ self.__return_pydantic_validator__ = return_val_wrapper
+ else:
+ self.__return_pydantic_validator__ = validator.validate_python
+ else:
+ self.__return_pydantic_validator__ = None
+
+ def __call__(self, *args: Any, **kwargs: Any) -> Any:
+ res = self.__pydantic_validator__.validate_python(pydantic_core.ArgsKwargs(args, kwargs))
+ if self.__return_pydantic_validator__:
+ return self.__return_pydantic_validator__(res)
+ else:
+ return res
diff --git a/.venv/lib/python3.12/site-packages/pydantic/_internal/_validators.py b/.venv/lib/python3.12/site-packages/pydantic/_internal/_validators.py
new file mode 100644
index 00000000..5d165c04
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/_internal/_validators.py
@@ -0,0 +1,424 @@
+"""Validator functions for standard library types.
+
+Import of this module is deferred since it contains imports of many standard library modules.
+"""
+
+from __future__ import annotations as _annotations
+
+import math
+import re
+import typing
+from decimal import Decimal
+from fractions import Fraction
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from typing import Any, Callable, Union
+
+from pydantic_core import PydanticCustomError, core_schema
+from pydantic_core._pydantic_core import PydanticKnownError
+
+
+def sequence_validator(
+ input_value: typing.Sequence[Any],
+ /,
+ validator: core_schema.ValidatorFunctionWrapHandler,
+) -> typing.Sequence[Any]:
+ """Validator for `Sequence` types, isinstance(v, Sequence) has already been called."""
+ value_type = type(input_value)
+
+ # We don't accept any plain string as a sequence
+ # Relevant issue: https://github.com/pydantic/pydantic/issues/5595
+ if issubclass(value_type, (str, bytes)):
+ raise PydanticCustomError(
+ 'sequence_str',
+ "'{type_name}' instances are not allowed as a Sequence value",
+ {'type_name': value_type.__name__},
+ )
+
+ # TODO: refactor sequence validation to validate with either a list or a tuple
+ # schema, depending on the type of the value.
+ # Additionally, we should be able to remove one of either this validator or the
+ # SequenceValidator in _std_types_schema.py (preferably this one, while porting over some logic).
+ # Effectively, a refactor for sequence validation is needed.
+ if value_type is tuple:
+ input_value = list(input_value)
+
+ v_list = validator(input_value)
+
+ # the rest of the logic is just re-creating the original type from `v_list`
+ if value_type is list:
+ return v_list
+ elif issubclass(value_type, range):
+ # return the list as we probably can't re-create the range
+ return v_list
+ elif value_type is tuple:
+ return tuple(v_list)
+ else:
+ # best guess at how to re-create the original type, more custom construction logic might be required
+ return value_type(v_list) # type: ignore[call-arg]
+
+
+def import_string(value: Any) -> Any:
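+ """If the value is a string, import it as a dotted path; otherwise return it unchanged for the next validator."""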
+ if isinstance(value, str):
+ try:
+ return _import_string_logic(value)
+ except ImportError as e:
+ raise PydanticCustomError('import_error', 'Invalid python path: {error}', {'error': str(e)}) from e
+ else:
+ # otherwise we just return the value and let the next validator do the rest of the work
+ return value
+
+
+def _import_string_logic(dotted_path: str) -> Any:
+ """Inspired by uvicorn — dotted paths should include a colon before the final item if that item is not a module.
+ (This is necessary to distinguish between a submodule and an attribute when there is a conflict.).
+
+ If the dotted path does not include a colon and the final item is not a valid module, importing as an attribute
+ rather than a submodule will be attempted automatically.
+
+ So, for example, the following values of `dotted_path` result in the following returned values:
+ * 'collections': <module 'collections'>
+ * 'collections.abc': <module 'collections.abc'>
+ * 'collections.abc:Mapping': <class 'collections.abc.Mapping'>
+ * `collections.abc.Mapping`: <class 'collections.abc.Mapping'> (though this is a bit slower than the previous line)
+
+ An error will be raised under any of the following scenarios:
+ * `dotted_path` contains more than one colon (e.g., 'collections:abc:Mapping')
+ * the substring of `dotted_path` before the colon is not a valid module in the environment (e.g., '123:Mapping')
+ * the substring of `dotted_path` after the colon is not an attribute of the module (e.g., 'collections:abc123')
+ """
+ from importlib import import_module
+
+ components = dotted_path.strip().split(':')
+ if len(components) > 2:
+ raise ImportError(f"Import strings should have at most one ':'; received {dotted_path!r}")
+
+ module_path = components[0]
+ if not module_path:
+ raise ImportError(f'Import strings should have a nonempty module name; received {dotted_path!r}')
+
+ try:
+ module = import_module(module_path)
+ except ModuleNotFoundError as e:
+ if '.' in module_path:
+ # Check if it would be valid if the final item was separated from its module with a `:`
+ maybe_module_path, maybe_attribute = dotted_path.strip().rsplit('.', 1)
+ try:
+ return _import_string_logic(f'{maybe_module_path}:{maybe_attribute}')
+ except ImportError:
+ pass
+ raise ImportError(f'No module named {module_path!r}') from e
+ raise e
+
+ if len(components) > 1:
+ attribute = components[1]
+ try:
+ return getattr(module, attribute)
+ except AttributeError as e:
+ raise ImportError(f'cannot import name {attribute!r} from {module_path!r}') from e
+ else:
+ return module
+
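+# Usage sketch for the two helpers above (doctest-style, illustrative only):
+#
+#     from collections.abc import Mapping
+#     assert _import_string_logic('collections.abc:Mapping') is Mapping
+#     assert _import_string_logic('collections.abc.Mapping') is Mapping  # slower fallback path
+#     import_string('collections.abc:Mapping')  # same, but wraps failures in PydanticCustomError
+#     import_string(42)                         # non-str input passes through unchanged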
+
+def pattern_either_validator(input_value: Any, /) -> typing.Pattern[Any]:
+ if isinstance(input_value, typing.Pattern):
+ return input_value
+ elif isinstance(input_value, (str, bytes)):
+ # TODO: strict mode
+ return compile_pattern(input_value) # type: ignore
+ else:
+ raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')
+
+
+def pattern_str_validator(input_value: Any, /) -> typing.Pattern[str]:
+ if isinstance(input_value, typing.Pattern):
+ if isinstance(input_value.pattern, str):
+ return input_value
+ else:
+ raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern')
+ elif isinstance(input_value, str):
+ return compile_pattern(input_value)
+ elif isinstance(input_value, bytes):
+ raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern')
+ else:
+ raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')
+
+
+def pattern_bytes_validator(input_value: Any, /) -> typing.Pattern[bytes]:
+ if isinstance(input_value, typing.Pattern):
+ if isinstance(input_value.pattern, bytes):
+ return input_value
+ else:
+ raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern')
+ elif isinstance(input_value, bytes):
+ return compile_pattern(input_value)
+ elif isinstance(input_value, str):
+ raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern')
+ else:
+ raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')
+
+
+PatternType = typing.TypeVar('PatternType', str, bytes)
+
+
+def compile_pattern(pattern: PatternType) -> typing.Pattern[PatternType]:
+ try:
+ return re.compile(pattern)
+ except re.error:
+ raise PydanticCustomError('pattern_regex', 'Input should be a valid regular expression')
+
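+# Usage sketch: each validator enforces the pattern's own str/bytes flavour.
+#
+#     p = pattern_str_validator('ab?c')        # compiled to a str re.Pattern
+#     assert pattern_either_validator(p) is p  # precompiled patterns pass through
+#     pattern_bytes_validator(b'ab?c')         # compiled to a bytes re.Pattern
+#     pattern_bytes_validator('ab?c')          # raises PydanticCustomError('pattern_bytes_type')
+#     compile_pattern('[')                     # raises PydanticCustomError('pattern_regex')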
+
+def ip_v4_address_validator(input_value: Any, /) -> IPv4Address:
+ if isinstance(input_value, IPv4Address):
+ return input_value
+
+ try:
+ return IPv4Address(input_value)
+ except ValueError:
+ raise PydanticCustomError('ip_v4_address', 'Input is not a valid IPv4 address')
+
+
+def ip_v6_address_validator(input_value: Any, /) -> IPv6Address:
+ if isinstance(input_value, IPv6Address):
+ return input_value
+
+ try:
+ return IPv6Address(input_value)
+ except ValueError:
+ raise PydanticCustomError('ip_v6_address', 'Input is not a valid IPv6 address')
+
+
+def ip_v4_network_validator(input_value: Any, /) -> IPv4Network:
+ """Assume IPv4Network initialised with a default `strict` argument.
+
+ See more:
+ https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network
+ """
+ if isinstance(input_value, IPv4Network):
+ return input_value
+
+ try:
+ return IPv4Network(input_value)
+ except ValueError:
+ raise PydanticCustomError('ip_v4_network', 'Input is not a valid IPv4 network')
+
+
+def ip_v6_network_validator(input_value: Any, /) -> IPv6Network:
+ """Assume IPv6Network initialised with a default `strict` argument.
+
+ See more:
+ https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network
+ """
+ if isinstance(input_value, IPv6Network):
+ return input_value
+
+ try:
+ return IPv6Network(input_value)
+ except ValueError:
+ raise PydanticCustomError('ip_v6_network', 'Input is not a valid IPv6 network')
+
+
+def ip_v4_interface_validator(input_value: Any, /) -> IPv4Interface:
+ if isinstance(input_value, IPv4Interface):
+ return input_value
+
+ try:
+ return IPv4Interface(input_value)
+ except ValueError:
+ raise PydanticCustomError('ip_v4_interface', 'Input is not a valid IPv4 interface')
+
+
+def ip_v6_interface_validator(input_value: Any, /) -> IPv6Interface:
+ if isinstance(input_value, IPv6Interface):
+ return input_value
+
+ try:
+ return IPv6Interface(input_value)
+ except ValueError:
+ raise PydanticCustomError('ip_v6_interface', 'Input is not a valid IPv6 interface')
+
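+# Usage sketch for the six IP validators above:
+#
+#     assert ip_v4_address_validator('127.0.0.1') == IPv4Address('127.0.0.1')
+#     ip_v4_network_validator('192.168.0.0/24')    # ok
+#     ip_v4_network_validator('192.168.0.1/24')    # raises: the default strict=True rejects set host bits
+#     ip_v4_interface_validator('192.168.0.1/24')  # ok: interfaces allow host bits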
+
+def fraction_validator(input_value: Any, /) -> Fraction:
+ if isinstance(input_value, Fraction):
+ return input_value
+
+ try:
+ return Fraction(input_value)
+ except ValueError:
+ raise PydanticCustomError('fraction_parsing', 'Input is not a valid fraction')
+
+
+def forbid_inf_nan_check(x: Any) -> Any:
+ if not math.isfinite(x):
+ raise PydanticKnownError('finite_number')
+ return x
+
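+# Usage sketch:
+#
+#     fraction_validator('3/4')           # Fraction(3, 4)
+#     forbid_inf_nan_check(1.5)           # returns 1.5
+#     forbid_inf_nan_check(float('nan'))  # raises PydanticKnownError('finite_number')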
+
+def _safe_repr(v: Any) -> int | float | str:
+ """The context argument for `PydanticKnownError` requires a number or str type, so we do a simple repr() coercion for types like timedelta.
+
+ See tests/test_types.py::test_annotated_metadata_any_order for some context.
+ """
+ if isinstance(v, (int, float, str)):
+ return v
+ return repr(v)
+
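+# E.g. _safe_repr(5) -> 5, while _safe_repr(timedelta(days=1)) -> 'datetime.timedelta(days=1)'.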
+
+def greater_than_validator(x: Any, gt: Any) -> Any:
+ try:
+ if not (x > gt):
+ raise PydanticKnownError('greater_than', {'gt': _safe_repr(gt)})
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'gt' to supplied value {x}")
+
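+# Usage sketch (the same pattern applies to the ge/lt/le validators below):
+#
+#     greater_than_validator(3, gt=2)    # returns 3
+#     greater_than_validator(1, gt=2)    # raises PydanticKnownError('greater_than')
+#     greater_than_validator('a', gt=2)  # the str/int comparison's TypeError is re-raised with context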
+
+def greater_than_or_equal_validator(x: Any, ge: Any) -> Any:
+ try:
+ if not (x >= ge):
+ raise PydanticKnownError('greater_than_equal', {'ge': _safe_repr(ge)})
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'ge' to supplied value {x}")
+
+
+def less_than_validator(x: Any, lt: Any) -> Any:
+ try:
+ if not (x < lt):
+ raise PydanticKnownError('less_than', {'lt': _safe_repr(lt)})
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'lt' to supplied value {x}")
+
+
+def less_than_or_equal_validator(x: Any, le: Any) -> Any:
+ try:
+ if not (x <= le):
+ raise PydanticKnownError('less_than_equal', {'le': _safe_repr(le)})
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'le' to supplied value {x}")
+
+
+def multiple_of_validator(x: Any, multiple_of: Any) -> Any:
+ try:
+ if x % multiple_of:
+ raise PydanticKnownError('multiple_of', {'multiple_of': _safe_repr(multiple_of)})
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'multiple_of' to supplied value {x}")
+
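+# Note the truthiness check above: any nonzero remainder fails, so e.g.
+#
+#     multiple_of_validator(10, multiple_of=2.5)  # passes: 10 % 2.5 == 0.0
+#     multiple_of_validator(10, multiple_of=3)    # raises PydanticKnownError('multiple_of')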
+
+def min_length_validator(x: Any, min_length: Any) -> Any:
+ try:
+ if not (len(x) >= min_length):
+ raise PydanticKnownError(
+ 'too_short', {'field_type': 'Value', 'min_length': min_length, 'actual_length': len(x)}
+ )
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'min_length' to supplied value {x}")
+
+
+def max_length_validator(x: Any, max_length: Any) -> Any:
+ try:
+ if len(x) > max_length:
+ raise PydanticKnownError(
+ 'too_long',
+ {'field_type': 'Value', 'max_length': max_length, 'actual_length': len(x)},
+ )
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'max_length' to supplied value {x}")
+
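+# Usage sketch: both length validators work on anything supporting len().
+#
+#     min_length_validator([1, 2, 3], min_length=2)  # returns the list
+#     max_length_validator('abcd', max_length=3)     # raises PydanticKnownError('too_long')
+#     max_length_validator(42, max_length=3)         # len()'s TypeError is re-raised with context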
+
+def _extract_decimal_digits_info(decimal: Decimal) -> tuple[int, int]:
+ """Compute the total number of digits and decimal places for a given [`Decimal`][decimal.Decimal] instance.
+
+ This function handles both normalized and non-normalized Decimal instances.
+ Example: Decimal('1.230') -> 4 digits, 3 decimal places
+
+ Args:
+ decimal (Decimal): The decimal number to analyze.
+
+ Returns:
+ tuple[int, int]: A tuple containing the number of decimal places and total digits.
+
+ Though this could be divided into two separate functions, the logic is easier to follow if we couple the computation
+ of the number of decimals and digits together.
+ """
+ decimal_tuple = decimal.as_tuple()
+ if not isinstance(decimal_tuple.exponent, int):
+ raise TypeError(f'Unable to extract decimal digits info from supplied value {decimal}')
+ exponent = decimal_tuple.exponent
+ num_digits = len(decimal_tuple.digits)
+
+ if exponent >= 0:
+ # A positive exponent adds that many trailing zeros
+ # Ex: digit_tuple=(1, 2, 3), exponent=2 -> 12300 -> 0 decimal places, 5 digits
+ num_digits += exponent
+ decimal_places = 0
+ else:
+ # If the absolute value of the negative exponent is larger than the
+ # number of digits, then it's the same as the number of digits,
+ # because it'll consume all the digits in digit_tuple and then
+ # add abs(exponent) - len(digit_tuple) leading zeros after the decimal point.
+ # Ex: digit_tuple=(1, 2, 3), exponent=-2 -> 1.23 -> 2 decimal places, 3 digits
+ # Ex: digit_tuple=(1, 2, 3), exponent=-4 -> 0.0123 -> 4 decimal places, 4 digits
+ decimal_places = abs(exponent)
+ num_digits = max(num_digits, decimal_places)
+
+ return decimal_places, num_digits
+
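+# Worked examples for the branches above:
+#
+#     from decimal import Decimal
+#     assert _extract_decimal_digits_info(Decimal('1.230')) == (3, 4)    # 3 places, 4 digits
+#     assert _extract_decimal_digits_info(Decimal('123E+2')) == (0, 5)   # 12300
+#     assert _extract_decimal_digits_info(Decimal('1.23E-4')) == (6, 6)  # 0.000123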
+
+def max_digits_validator(x: Any, max_digits: Any) -> Any:
+ try:
+ # Extract inside the try block so a Decimal whose exponent is not an int
+ # (e.g. Decimal('NaN')) is reported as a constraint error rather than an
+ # unrelated failure from the helper above.
+ _, num_digits = _extract_decimal_digits_info(x)
+ _, normalized_num_digits = _extract_decimal_digits_info(x.normalize())
+ if (num_digits > max_digits) and (normalized_num_digits > max_digits):
+ raise PydanticKnownError(
+ 'decimal_max_digits',
+ {'max_digits': max_digits},
+ )
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'max_digits' to supplied value {x}")
+
+
+def decimal_places_validator(x: Any, decimal_places: Any) -> Any:
+ try:
+ # As in max_digits_validator above, extract inside the try block so a
+ # malformed Decimal surfaces as a constraint error.
+ decimal_places_, _ = _extract_decimal_digits_info(x)
+ normalized_decimal_places, _ = _extract_decimal_digits_info(x.normalize())
+ if (decimal_places_ > decimal_places) and (normalized_decimal_places > decimal_places):
+ raise PydanticKnownError(
+ 'decimal_max_places',
+ {'decimal_places': decimal_places},
+ )
+ return x
+ except TypeError:
+ raise TypeError(f"Unable to apply constraint 'decimal_places' to supplied value {x}")
+
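+# Usage sketch: a value passes if either its literal or its normalized form fits.
+#
+#     from decimal import Decimal
+#     max_digits_validator(Decimal('1.230'), max_digits=3)          # passes: normalizes to Decimal('1.23')
+#     decimal_places_validator(Decimal('1.230'), decimal_places=2)  # passes for the same reason
+#     decimal_places_validator(Decimal('1.234'), decimal_places=2)  # raises 'decimal_max_places'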
+
+NUMERIC_VALIDATOR_LOOKUP: dict[str, Callable] = {
+ 'gt': greater_than_validator,
+ 'ge': greater_than_or_equal_validator,
+ 'lt': less_than_validator,
+ 'le': less_than_or_equal_validator,
+ 'multiple_of': multiple_of_validator,
+ 'min_length': min_length_validator,
+ 'max_length': max_length_validator,
+ 'max_digits': max_digits_validator,
+ 'decimal_places': decimal_places_validator,
+}
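+
+# The lookup lets callers dispatch a constraint by name, e.g. (a sketch):
+#
+#     from functools import partial
+#     check_positive = partial(NUMERIC_VALIDATOR_LOOKUP['gt'], gt=0)
+#     check_positive(5)   # returns 5
+#     check_positive(-1)  # raises PydanticKnownError('greater_than')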
+
+IpType = Union[IPv4Address, IPv6Address, IPv4Network, IPv6Network, IPv4Interface, IPv6Interface]
+
+IP_VALIDATOR_LOOKUP: dict[type[IpType], Callable] = {
+ IPv4Address: ip_v4_address_validator,
+ IPv6Address: ip_v6_address_validator,
+ IPv4Network: ip_v4_network_validator,
+ IPv6Network: ip_v6_network_validator,
+ IPv4Interface: ip_v4_interface_validator,
+ IPv6Interface: ip_v6_interface_validator,
+}
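+
+# And likewise by IP type, e.g. (a sketch):
+#
+#     IP_VALIDATOR_LOOKUP[IPv6Address]('::1')  # IPv6Address('::1')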