about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/pydantic/deprecated
diff options
context:
space:
mode:
Diffstat (limited to '.venv/lib/python3.12/site-packages/pydantic/deprecated')
-rw-r--r--.venv/lib/python3.12/site-packages/pydantic/deprecated/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/pydantic/deprecated/class_validators.py256
-rw-r--r--.venv/lib/python3.12/site-packages/pydantic/deprecated/config.py72
-rw-r--r--.venv/lib/python3.12/site-packages/pydantic/deprecated/copy_internals.py224
-rw-r--r--.venv/lib/python3.12/site-packages/pydantic/deprecated/decorator.py283
-rw-r--r--.venv/lib/python3.12/site-packages/pydantic/deprecated/json.py141
-rw-r--r--.venv/lib/python3.12/site-packages/pydantic/deprecated/parse.py80
-rw-r--r--.venv/lib/python3.12/site-packages/pydantic/deprecated/tools.py103
8 files changed, 1159 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/pydantic/deprecated/__init__.py b/.venv/lib/python3.12/site-packages/pydantic/deprecated/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/deprecated/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/pydantic/deprecated/class_validators.py b/.venv/lib/python3.12/site-packages/pydantic/deprecated/class_validators.py
new file mode 100644
index 00000000..810b4687
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/deprecated/class_validators.py
@@ -0,0 +1,256 @@
+"""Old `@validator` and `@root_validator` function validators from V1."""
+
+from __future__ import annotations as _annotations
+
+from functools import partial, partialmethod
+from types import FunctionType
+from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload
+from warnings import warn
+
+from typing_extensions import Literal, Protocol, TypeAlias, deprecated
+
+from .._internal import _decorators, _decorators_v1
+from ..errors import PydanticUserError
+from ..warnings import PydanticDeprecatedSince20
+
# Warning text emitted when callers still pass the now-ignored V1 `allow_reuse` flag.
_ALLOW_REUSE_WARNING_MESSAGE = '`allow_reuse` is deprecated and will be ignored; it should no longer be necessary'
+
+
if TYPE_CHECKING:
    # Structural types describing every call signature a V1 `@validator` or
    # `@root_validator` function was allowed to have. These exist purely for
    # type checking; this branch never executes at runtime.

    class _OnlyValueValidatorClsMethod(Protocol):
        # classmethod-style validator receiving only the value being validated
        def __call__(self, __cls: Any, __value: Any) -> Any: ...

    class _V1ValidatorWithValuesClsMethod(Protocol):
        # classmethod-style validator also receiving the previously-validated `values`
        def __call__(self, __cls: Any, __value: Any, values: dict[str, Any]) -> Any: ...

    class _V1ValidatorWithValuesKwOnlyClsMethod(Protocol):
        # as above, but `values` must be passed as a keyword argument
        def __call__(self, __cls: Any, __value: Any, *, values: dict[str, Any]) -> Any: ...

    class _V1ValidatorWithKwargsClsMethod(Protocol):
        # classmethod-style validator absorbing everything via **kwargs
        def __call__(self, __cls: Any, **kwargs: Any) -> Any: ...

    class _V1ValidatorWithValuesAndKwargsClsMethod(Protocol):
        # classmethod-style validator taking `values` plus arbitrary **kwargs
        def __call__(self, __cls: Any, values: dict[str, Any], **kwargs: Any) -> Any: ...

    class _V1RootValidatorClsMethod(Protocol):
        # classmethod-style root validator: maps the full values dict to a new one
        def __call__(
            self, __cls: Any, __values: _decorators_v1.RootValidatorValues
        ) -> _decorators_v1.RootValidatorValues: ...

    # Union of every accepted `@validator` signature (bound and unbound forms).
    V1Validator = Union[
        _OnlyValueValidatorClsMethod,
        _V1ValidatorWithValuesClsMethod,
        _V1ValidatorWithValuesKwOnlyClsMethod,
        _V1ValidatorWithKwargsClsMethod,
        _V1ValidatorWithValuesAndKwargsClsMethod,
        _decorators_v1.V1ValidatorWithValues,
        _decorators_v1.V1ValidatorWithValuesKwOnly,
        _decorators_v1.V1ValidatorWithKwargs,
        _decorators_v1.V1ValidatorWithValuesAndKwargs,
    ]

    # Union of every accepted `@root_validator` signature.
    V1RootValidator = Union[
        _V1RootValidatorClsMethod,
        _decorators_v1.V1RootValidatorFunction,
    ]

    _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]]

    # Allow both a V1 (assumed pre=False) or V2 (assumed mode='after') validator
    # We lie to type checkers and say we return the same thing we get
    # but in reality we return a proxy object that _mostly_ behaves like the wrapped thing
    _V1ValidatorType = TypeVar('_V1ValidatorType', V1Validator, _PartialClsOrStaticMethod)
    _V1RootValidatorFunctionType = TypeVar(
        '_V1RootValidatorFunctionType',
        _decorators_v1.V1RootValidatorFunction,
        _V1RootValidatorClsMethod,
        _PartialClsOrStaticMethod,
    )
else:
    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
    # and https://youtrack.jetbrains.com/issue/PY-51428
    # At runtime, rebind `DeprecationWarning` so the `warn(...)` calls below emit
    # pydantic's own deprecation category instead of the builtin one.
    DeprecationWarning = PydanticDeprecatedSince20
+
+
@deprecated(
    'Pydantic V1 style `@validator` validators are deprecated.'
    ' You should migrate to Pydantic V2 style `@field_validator` validators,'
    ' see the migration guide for more details',
    category=None,
)
def validator(
    __field: str,
    *fields: str,
    pre: bool = False,
    each_item: bool = False,
    always: bool = False,
    check_fields: bool | None = None,
    allow_reuse: bool = False,
) -> Callable[[_V1ValidatorType], _V1ValidatorType]:
    """Decorate methods on the class indicating that they should be used to validate fields.

    Args:
        __field (str): The first field the validator should be called on; kept separate
            from `fields` so that forgetting to pass any field name is an error.
        *fields (str): Additional field(s) the validator should be called on.
        pre (bool, optional): Run before the standard validators when True, after them
            otherwise. Defaults to False.
        each_item (bool, optional): For complex objects (sets, lists etc.) whether to validate
            individual elements rather than the whole object. Defaults to False.
        always (bool, optional): Whether this method and other validators should be called even if
            the value is missing. Defaults to False.
        check_fields (bool | None, optional): Whether to check that the fields actually exist on the model.
            Defaults to None.
        allow_reuse (bool, optional): Deprecated and ignored; previously controlled
            duplicate-validator tracking. Defaults to False.

    Returns:
        Callable: A decorator that can be used to decorate a
            function to be used as a validator.
    """
    warn(
        'Pydantic V1 style `@validator` validators are deprecated.'
        ' You should migrate to Pydantic V2 style `@field_validator` validators,'
        ' see the migration guide for more details',
        DeprecationWarning,
        stacklevel=2,
    )

    if allow_reuse is True:  # pragma: no cover
        warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning)

    all_fields = (__field, *fields)
    if isinstance(all_fields[0], FunctionType):
        # bare `@validator` usage: the "field" is actually the decorated function
        raise PydanticUserError(
            '`@validator` should be used with fields and keyword arguments, not bare. '
            "E.g. usage should be `@validator('<field_name>', ...)`",
            code='validator-no-fields',
        )
    if any(not isinstance(name, str) for name in all_fields):
        raise PydanticUserError(
            '`@validator` fields should be passed as separate string args. '
            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`",
            code='validator-invalid-fields',
        )

    mode: Literal['before', 'after'] = 'before' if pre is True else 'after'

    def decorate(f: Any) -> _decorators.PydanticDescriptorProxy[Any]:
        if _decorators.is_instance_method_from_sig(f):
            raise PydanticUserError(
                '`@validator` cannot be applied to instance methods', code='validator-instance-method'
            )
        # auto apply the @classmethod decorator
        func = _decorators.ensure_classmethod_based_on_signature(f)
        info = _decorators.ValidatorDecoratorInfo(
            fields=all_fields,
            mode=mode,
            each_item=each_item,
            always=always,
            check_fields=check_fields,
        )
        return _decorators.PydanticDescriptorProxy(func, info, shim=_decorators_v1.make_generic_v1_field_validator)

    return decorate  # type: ignore[return-value]
+
+
# Typing overloads: `root_validator` is keyword-only, and `skip_on_failure=True`
# is mandatory unless `pre=True` is given.
@overload
def root_validator(
    *,
    # if you don't specify `pre` the default is `pre=False`
    # which means you need to specify `skip_on_failure=True`
    skip_on_failure: Literal[True],
    allow_reuse: bool = ...,
) -> Callable[
    [_V1RootValidatorFunctionType],
    _V1RootValidatorFunctionType,
]: ...


@overload
def root_validator(
    *,
    # if you specify `pre=True` then you don't need to specify
    # `skip_on_failure`, in fact it is not allowed as an argument!
    pre: Literal[True],
    allow_reuse: bool = ...,
) -> Callable[
    [_V1RootValidatorFunctionType],
    _V1RootValidatorFunctionType,
]: ...


@overload
def root_validator(
    *,
    # if you explicitly specify `pre=False` then you
    # MUST specify `skip_on_failure=True`
    pre: Literal[False],
    skip_on_failure: Literal[True],
    allow_reuse: bool = ...,
) -> Callable[
    [_V1RootValidatorFunctionType],
    _V1RootValidatorFunctionType,
]: ...
+
+
@deprecated(
    'Pydantic V1 style `@root_validator` validators are deprecated.'
    ' You should migrate to Pydantic V2 style `@model_validator` validators,'
    ' see the migration guide for more details',
    category=None,
)
def root_validator(
    *__args,
    pre: bool = False,
    skip_on_failure: bool = False,
    allow_reuse: bool = False,
) -> Any:
    """Decorate methods on a model indicating that they should be used to validate (and perhaps
    modify) data either before or after standard model parsing/validation is performed.

    Args:
        pre (bool, optional): Run before the standard validators when True, after them
            otherwise. Defaults to False.
        skip_on_failure (bool, optional): Whether to stop validation and return as soon as a
            failure is encountered. Defaults to False.
        allow_reuse (bool, optional): Deprecated and ignored; previously controlled
            duplicate-validator tracking. Defaults to False.

    Returns:
        Any: A decorator that can be used to decorate a function to be used as a root_validator.
    """
    warn(
        'Pydantic V1 style `@root_validator` validators are deprecated.'
        ' You should migrate to Pydantic V2 style `@model_validator` validators,'
        ' see the migration guide for more details',
        DeprecationWarning,
        stacklevel=2,
    )

    if __args:
        # Bare `@root_validator` usage: re-enter with defaults so the
        # pre/skip_on_failure check below raises the appropriate error.
        return root_validator()(*__args)  # type: ignore

    if allow_reuse is True:  # pragma: no cover
        warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning)

    if pre is False and skip_on_failure is not True:
        raise PydanticUserError(
            'If you use `@root_validator` with pre=False (the default) you MUST specify `skip_on_failure=True`.'
            ' Note that `@root_validator` is deprecated and should be replaced with `@model_validator`.',
            code='root-validator-pre-skip',
        )

    mode: Literal['before', 'after'] = 'before' if pre is True else 'after'
    wrap = partial(_decorators_v1.make_v1_generic_root_validator, pre=pre)

    def decorate(f: Callable[..., Any] | classmethod[Any, Any, Any] | staticmethod[Any, Any]) -> Any:
        if _decorators.is_instance_method_from_sig(f):
            raise TypeError('`@root_validator` cannot be applied to instance methods')
        # auto apply the @classmethod decorator
        method = _decorators.ensure_classmethod_based_on_signature(f)
        info = _decorators.RootValidatorDecoratorInfo(mode=mode)
        return _decorators.PydanticDescriptorProxy(method, info, shim=wrap)

    return decorate
diff --git a/.venv/lib/python3.12/site-packages/pydantic/deprecated/config.py b/.venv/lib/python3.12/site-packages/pydantic/deprecated/config.py
new file mode 100644
index 00000000..45400c65
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/deprecated/config.py
@@ -0,0 +1,72 @@
+from __future__ import annotations as _annotations
+
+import warnings
+from typing import TYPE_CHECKING, Any
+
+from typing_extensions import Literal, deprecated
+
+from .._internal import _config
+from ..warnings import PydanticDeprecatedSince20
+
if not TYPE_CHECKING:
    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
    # and https://youtrack.jetbrains.com/issue/PY-51428
    # Rebind so the `warnings.warn(...)` calls below emit pydantic's own
    # deprecation category at runtime, while type checkers see the builtin.
    DeprecationWarning = PydanticDeprecatedSince20

# Public V1-compatibility exports of this module.
__all__ = 'BaseConfig', 'Extra'
+
+
class _ConfigMetaclass(type):
    """Metaclass serving V1 `Config` class-attribute lookups from `_config.config_defaults`."""

    def __getattr__(self, item: str) -> Any:
        try:
            value = _config.config_defaults[item]
        except KeyError as exc:
            # str(KeyError) renders the missing key in quotes, matching CPython's wording
            raise AttributeError(f"type object '{self.__name__}' has no attribute {exc}") from exc
        warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning)
        return value
+
+
@deprecated('BaseConfig is deprecated. Use the `pydantic.ConfigDict` instead.', category=PydanticDeprecatedSince20)
class BaseConfig(metaclass=_ConfigMetaclass):
    """This class is only retained for backwards compatibility.

    !!! Warning "Deprecated"
        BaseConfig is deprecated. Use the [`pydantic.ConfigDict`][pydantic.ConfigDict] instead.
    """

    def __getattr__(self, item: str) -> Any:
        # Instance attribute access: warn when the attribute is found, otherwise
        # fall back to the class lookup so the metaclass-provided V1 defaults
        # still resolve through an instance.
        try:
            obj = super().__getattribute__(item)
            warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning)
            return obj
        except AttributeError as exc:
            try:
                return getattr(type(self), item)
            except AttributeError:
                # re-raising changes the displayed text to reflect that `self` is not a type
                raise AttributeError(str(exc)) from exc

    def __init_subclass__(cls, **kwargs: Any) -> None:
        # Subclassing (the V1 `class Config(BaseConfig)` pattern) also warns.
        warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning)
        return super().__init_subclass__(**kwargs)
+
+
class _ExtraMeta(type):
    """Metaclass emitting a deprecation warning when the `Extra` members are read."""

    def __getattribute__(self, __name: str) -> Any:
        # The @deprecated decorator accesses other attributes, so we only emit a warning for the expected ones
        if __name == 'allow' or __name == 'ignore' or __name == 'forbid':
            warnings.warn(
                "`pydantic.config.Extra` is deprecated, use literal values instead (e.g. `extra='allow'`)",
                DeprecationWarning,
                stacklevel=2,
            )
        return super().__getattribute__(__name)
+
+
@deprecated(
    "Extra is deprecated. Use literal values instead (e.g. `extra='allow'`)", category=PydanticDeprecatedSince20
)
class Extra(metaclass=_ExtraMeta):
    """Deprecated V1-style namespace for the `extra` config values.

    Reading any of the three members emits a deprecation warning via `_ExtraMeta`;
    use the literal strings directly instead.
    """

    allow: Literal['allow'] = 'allow'
    ignore: Literal['ignore'] = 'ignore'
    forbid: Literal['forbid'] = 'forbid'
diff --git a/.venv/lib/python3.12/site-packages/pydantic/deprecated/copy_internals.py b/.venv/lib/python3.12/site-packages/pydantic/deprecated/copy_internals.py
new file mode 100644
index 00000000..00e0a8a9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/deprecated/copy_internals.py
@@ -0,0 +1,224 @@
+from __future__ import annotations as _annotations
+
+import typing
+from copy import deepcopy
+from enum import Enum
+from typing import Any, Tuple
+
+import typing_extensions
+
+from .._internal import (
+    _model_construction,
+    _typing_extra,
+    _utils,
+)
+
if typing.TYPE_CHECKING:
    from .. import BaseModel
    from .._internal._utils import AbstractSetIntStr, MappingIntStrAny

    # Type-checking-only aliases shared by the deprecated copy/iter helpers below.
    AnyClassMethod = classmethod[Any, Any, Any]
    TupleGenerator = typing.Generator[Tuple[str, Any], None, None]
    Model = typing.TypeVar('Model', bound='BaseModel')
    # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope
    IncEx: typing_extensions.TypeAlias = 'set[int] | set[str] | dict[int, Any] | dict[str, Any] | None'

# `object.__setattr__` bound once; bypasses any custom `__setattr__` on model classes.
_object_setattr = _model_construction.object_setattr
+
+
def _iter(
    self: BaseModel,
    to_dict: bool = False,
    by_alias: bool = False,
    include: AbstractSetIntStr | MappingIntStrAny | None = None,
    exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
) -> TupleGenerator:
    """Yield `(key, value)` pairs for the model's fields and extras, applying
    the V1-style include/exclude/alias filtering options.
    """
    # Merge field set excludes with explicit exclude parameter with explicit overriding field set options.
    # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case.
    if exclude is not None:
        exclude = _utils.ValueItems.merge(
            {k: v.exclude for k, v in self.__pydantic_fields__.items() if v.exclude is not None}, exclude
        )

    if include is not None:
        include = _utils.ValueItems.merge({k: True for k in self.__pydantic_fields__}, include, intersect=True)

    allowed_keys = _calculate_keys(self, include=include, exclude=exclude, exclude_unset=exclude_unset)  # type: ignore
    if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none):
        # huge boost for plain _iter()
        yield from self.__dict__.items()
        if self.__pydantic_extra__:
            yield from self.__pydantic_extra__.items()
        return

    value_exclude = _utils.ValueItems(self, exclude) if exclude is not None else None
    value_include = _utils.ValueItems(self, include) if include is not None else None

    # Extras (if any) are iterated after the regular fields.
    if self.__pydantic_extra__ is None:
        items = self.__dict__.items()
    else:
        items = list(self.__dict__.items()) + list(self.__pydantic_extra__.items())

    for field_key, v in items:
        if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None):
            continue

        if exclude_defaults:
            try:
                field = self.__pydantic_fields__[field_key]
            except KeyError:
                pass
            else:
                # skip values equal to the field default (required fields have no default)
                if not field.is_required() and field.default == v:
                    continue

        if by_alias and field_key in self.__pydantic_fields__:
            dict_key = self.__pydantic_fields__[field_key].alias or field_key
        else:
            dict_key = field_key

        if to_dict or value_include or value_exclude:
            # recurse into nested models/containers with per-element filters
            v = _get_value(
                type(self),
                v,
                to_dict=to_dict,
                by_alias=by_alias,
                include=value_include and value_include.for_element(field_key),
                exclude=value_exclude and value_exclude.for_element(field_key),
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )
        yield dict_key, v
+
+
def _copy_and_set_values(
    self: Model,
    values: dict[str, Any],
    fields_set: set[str],
    extra: dict[str, Any] | None = None,
    private: dict[str, Any] | None = None,
    *,
    deep: bool,
) -> Model:
    """Build a new model instance of `type(self)` directly from the given state,
    bypassing `__init__`; deep-copies the state first when `deep` is True.
    """
    if deep:
        # chances of having empty dict here are quite low for using smart_deepcopy
        values, extra, private = deepcopy(values), deepcopy(extra), deepcopy(private)

    model_cls = self.__class__
    copied = model_cls.__new__(model_cls)
    # write through object.__setattr__ so custom model __setattr__ hooks are skipped
    _object_setattr(copied, '__dict__', values)
    _object_setattr(copied, '__pydantic_extra__', extra)
    _object_setattr(copied, '__pydantic_fields_set__', fields_set)
    _object_setattr(copied, '__pydantic_private__', private)

    return copied
+
+
@typing.no_type_check
def _get_value(
    cls: type[BaseModel],
    v: Any,
    to_dict: bool,
    by_alias: bool,
    include: AbstractSetIntStr | MappingIntStrAny | None,
    exclude: AbstractSetIntStr | MappingIntStrAny | None,
    exclude_unset: bool,
    exclude_defaults: bool,
    exclude_none: bool,
) -> Any:
    """Recursively convert a single value for V1-style output, applying the
    include/exclude filters to nested models, dicts and sequences.
    """
    from .. import BaseModel

    if isinstance(v, BaseModel):
        if to_dict:
            # nested models are dumped with the same filtering options
            return v.model_dump(
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                include=include,  # type: ignore
                exclude=exclude,  # type: ignore
                exclude_none=exclude_none,
            )
        else:
            return v.copy(include=include, exclude=exclude)

    value_exclude = _utils.ValueItems(v, exclude) if exclude else None
    value_include = _utils.ValueItems(v, include) if include else None

    if isinstance(v, dict):
        # filters are applied per key, recursing into each kept value
        return {
            k_: _get_value(
                cls,
                v_,
                to_dict=to_dict,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                include=value_include and value_include.for_element(k_),
                exclude=value_exclude and value_exclude.for_element(k_),
                exclude_none=exclude_none,
            )
            for k_, v_ in v.items()
            if (not value_exclude or not value_exclude.is_excluded(k_))
            and (not value_include or value_include.is_included(k_))
        }

    elif _utils.sequence_like(v):
        # sequence-like containers; filters are applied per index
        seq_args = (
            _get_value(
                cls,
                v_,
                to_dict=to_dict,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                include=value_include and value_include.for_element(i),
                exclude=value_exclude and value_exclude.for_element(i),
                exclude_none=exclude_none,
            )
            for i, v_ in enumerate(v)
            if (not value_exclude or not value_exclude.is_excluded(i))
            and (not value_include or value_include.is_included(i))
        )

        # namedtuples need positional construction; other sequences accept an iterable
        return v.__class__(*seq_args) if _typing_extra.is_namedtuple(v.__class__) else v.__class__(seq_args)

    elif isinstance(v, Enum) and getattr(cls.model_config, 'use_enum_values', False):
        return v.value

    else:
        return v
+
+
def _calculate_keys(
    self: BaseModel,
    include: MappingIntStrAny | None,
    exclude: MappingIntStrAny | None,
    exclude_unset: bool,
    update: typing.Dict[str, Any] | None = None,
) -> typing.AbstractSet[str] | None:
    """Compute the set of keys `_iter` may emit, or `None` when no filtering applies.

    Starts from either the explicitly-set fields (`exclude_unset`) or every
    attribute plus extras, then narrows by `include` and removes keys that are
    overridden via `update` or fully excluded via `exclude`.
    """
    if include is None and exclude is None and exclude_unset is False:
        # fast path: nothing to filter
        return None

    keys: typing.AbstractSet[str]
    if exclude_unset:
        keys = self.__pydantic_fields_set__.copy()
    else:
        keys = set(self.__dict__) | set(self.__pydantic_extra__ or {})

    if include is not None:
        keys &= include.keys()

    if update:
        keys -= update.keys()

    if exclude:
        keys -= {name for name, value in exclude.items() if _utils.ValueItems.is_true(value)}

    return keys
diff --git a/.venv/lib/python3.12/site-packages/pydantic/deprecated/decorator.py b/.venv/lib/python3.12/site-packages/pydantic/deprecated/decorator.py
new file mode 100644
index 00000000..27ee4603
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/deprecated/decorator.py
@@ -0,0 +1,283 @@
+import warnings
+from functools import wraps
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload
+
+from typing_extensions import deprecated
+
+from .._internal import _config, _typing_extra
+from ..alias_generators import to_pascal
+from ..errors import PydanticUserError
+from ..functional_validators import field_validator
+from ..main import BaseModel, create_model
+from ..warnings import PydanticDeprecatedSince20
+
if not TYPE_CHECKING:
    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
    # and https://youtrack.jetbrains.com/issue/PY-51428
    # Rebind so `warnings.warn(...)` below emits pydantic's deprecation category at runtime.
    DeprecationWarning = PydanticDeprecatedSince20

__all__ = ('validate_arguments',)

if TYPE_CHECKING:
    # Type-checking-only aliases for the decorator signatures below.
    AnyCallable = Callable[..., Any]

    AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable)
    ConfigType = Union[None, Type[Any], Dict[str, Any]]
+
+
# Typing overloads: bare `@validate_arguments` vs `@validate_arguments(config=...)`.
@overload
def validate_arguments(
    func: None = None, *, config: 'ConfigType' = None
) -> Callable[['AnyCallableT'], 'AnyCallableT']: ...


@overload
def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT': ...
+
+
@deprecated(
    'The `validate_arguments` method is deprecated; use `validate_call` instead.',
    category=None,
)
def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any:
    """Decorator to validate the arguments passed to a function.

    Usable both bare (`@validate_arguments`) and with options
    (`@validate_arguments(config=...)`).
    """
    warnings.warn(
        'The `validate_arguments` method is deprecated; use `validate_call` instead.',
        PydanticDeprecatedSince20,
        stacklevel=2,
    )

    def validate(_func: 'AnyCallable') -> 'AnyCallable':
        validated = ValidatedFunction(_func, config)

        @wraps(_func)
        def wrapper_function(*args: Any, **kwargs: Any) -> Any:
            return validated.call(*args, **kwargs)

        # expose the validation machinery on the wrapper, mirroring the V1 API
        wrapper_function.vd = validated  # type: ignore
        wrapper_function.validate = validated.init_model_instance  # type: ignore
        wrapper_function.raw_function = validated.raw_function  # type: ignore
        wrapper_function.model = validated.model  # type: ignore
        return wrapper_function

    return validate(func) if func else validate
+
+
# Reserved synthetic field names used by `ValidatedFunction`'s internal model.
# The `ALT_*` names are substituted when the function has a real parameter
# named `args`/`kwargs`; the last two carry misuse details for the validators.
ALT_V_ARGS = 'v__args'
ALT_V_KWARGS = 'v__kwargs'
V_POSITIONAL_ONLY_NAME = 'v__positional_only'
V_DUPLICATE_KWARGS = 'v__duplicate_kwargs'
+
+
class ValidatedFunction:
    """Wraps a function with a dynamically-built pydantic model that validates
    its arguments before each call (backs the deprecated `@validate_arguments`).
    """

    def __init__(self, function: 'AnyCallable', config: 'ConfigType'):
        """Inspect `function`'s signature and build the validation model.

        Raises:
            PydanticUserError: if the function uses one of the reserved `v__*`
                parameter names, or (via `create_model`) if `config` sets
                `alias_generator`.
        """
        from inspect import Parameter, signature

        parameters: Mapping[str, Parameter] = signature(function).parameters

        if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}:
            raise PydanticUserError(
                f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" '
                f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator',
                code=None,
            )

        self.raw_function = function
        # positional index -> parameter name, used to map *args at call time
        self.arg_mapping: Dict[int, str] = {}
        self.positional_only_args: set[str] = set()
        self.v_args_name = 'args'
        self.v_kwargs_name = 'kwargs'

        type_hints = _typing_extra.get_type_hints(function, include_extras=True)
        takes_args = False
        takes_kwargs = False
        # field name -> (annotation, default) pairs fed to `create_model`
        fields: Dict[str, Tuple[Any, Any]] = {}
        for i, (name, p) in enumerate(parameters.items()):
            if p.annotation is p.empty:
                annotation = Any
            else:
                annotation = type_hints[name]

            default = ... if p.default is p.empty else p.default
            if p.kind == Parameter.POSITIONAL_ONLY:
                self.arg_mapping[i] = name
                fields[name] = annotation, default
                # error-carrying field, populated only when misuse is detected
                fields[V_POSITIONAL_ONLY_NAME] = List[str], None
                self.positional_only_args.add(name)
            elif p.kind == Parameter.POSITIONAL_OR_KEYWORD:
                self.arg_mapping[i] = name
                fields[name] = annotation, default
                fields[V_DUPLICATE_KWARGS] = List[str], None
            elif p.kind == Parameter.KEYWORD_ONLY:
                fields[name] = annotation, default
            elif p.kind == Parameter.VAR_POSITIONAL:
                self.v_args_name = name
                fields[name] = Tuple[annotation, ...], None
                takes_args = True
            else:
                assert p.kind == Parameter.VAR_KEYWORD, p.kind
                self.v_kwargs_name = name
                fields[name] = Dict[str, annotation], None
                takes_kwargs = True

        # these checks avoid a clash between "args" and a field with that name
        if not takes_args and self.v_args_name in fields:
            self.v_args_name = ALT_V_ARGS

        # same with "kwargs"
        if not takes_kwargs and self.v_kwargs_name in fields:
            self.v_kwargs_name = ALT_V_KWARGS

        if not takes_args:
            # we add the field so validation below can raise the correct exception
            fields[self.v_args_name] = List[Any], None

        if not takes_kwargs:
            # same with kwargs
            fields[self.v_kwargs_name] = Dict[Any, Any], None

        self.create_model(fields, takes_args, takes_kwargs, config)

    def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel:
        """Validate `args`/`kwargs` and return the populated model instance."""
        values = self.build_values(args, kwargs)
        return self.model(**values)

    def call(self, *args: Any, **kwargs: Any) -> Any:
        """Validate the arguments, then invoke the wrapped function with them."""
        m = self.init_model_instance(*args, **kwargs)
        return self.execute(m)

    def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]:
        """Map positional and keyword arguments onto the model's field names."""
        values: Dict[str, Any] = {}
        if args:
            arg_iter = enumerate(args)
            while True:
                try:
                    i, a = next(arg_iter)
                except StopIteration:
                    break
                arg_name = self.arg_mapping.get(i)
                if arg_name is not None:
                    values[arg_name] = a
                else:
                    # ran past the named positionals: the remainder goes to *args
                    values[self.v_args_name] = [a] + [a for _, a in arg_iter]
                    break

        var_kwargs: Dict[str, Any] = {}
        wrong_positional_args = []
        duplicate_kwargs = []
        fields_alias = [
            field.alias
            for name, field in self.model.__pydantic_fields__.items()
            if name not in (self.v_args_name, self.v_kwargs_name)
        ]
        non_var_fields = set(self.model.__pydantic_fields__) - {self.v_args_name, self.v_kwargs_name}
        for k, v in kwargs.items():
            if k in non_var_fields or k in fields_alias:
                if k in self.positional_only_args:
                    # keyword use of a positional-only param: recorded, reported by validator
                    wrong_positional_args.append(k)
                if k in values:
                    duplicate_kwargs.append(k)
                values[k] = v
            else:
                var_kwargs[k] = v

        if var_kwargs:
            values[self.v_kwargs_name] = var_kwargs
        if wrong_positional_args:
            values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args
        if duplicate_kwargs:
            values[V_DUPLICATE_KWARGS] = duplicate_kwargs
        return values

    def execute(self, m: BaseModel) -> Any:
        """Call the wrapped function using the validated values stored on `m`."""
        # only pass fields that were explicitly set or carry a default_factory
        d = {
            k: v
            for k, v in m.__dict__.items()
            if k in m.__pydantic_fields_set__ or m.__pydantic_fields__[k].default_factory
        }
        var_kwargs = d.pop(self.v_kwargs_name, {})

        if self.v_args_name in d:
            # reconstruct the call: everything before *args is positional,
            # everything after it must be passed by keyword
            args_: List[Any] = []
            in_kwargs = False
            kwargs = {}
            for name, value in d.items():
                if in_kwargs:
                    kwargs[name] = value
                elif name == self.v_args_name:
                    args_ += value
                    in_kwargs = True
                else:
                    args_.append(value)
            return self.raw_function(*args_, **kwargs, **var_kwargs)
        elif self.positional_only_args:
            args_ = []
            kwargs = {}
            for name, value in d.items():
                if name in self.positional_only_args:
                    args_.append(value)
                else:
                    kwargs[name] = value
            return self.raw_function(*args_, **kwargs, **var_kwargs)
        else:
            return self.raw_function(**d, **var_kwargs)

    def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None:
        """Build `self.model`: a `BaseModel` subclass with one field per parameter
        plus validators that surface argument-usage errors as `TypeError`s.
        """
        pos_args = len(self.arg_mapping)

        config_wrapper = _config.ConfigWrapper(config)

        if config_wrapper.alias_generator:
            raise PydanticUserError(
                'Setting the "alias_generator" property on custom Config for '
                '@validate_arguments is not yet supported, please remove.',
                code=None,
            )
        if config_wrapper.extra is None:
            # reject unexpected keyword arguments by default
            config_wrapper.config_dict['extra'] = 'forbid'

        class DecoratorBaseModel(BaseModel):
            @field_validator(self.v_args_name, check_fields=False)
            @classmethod
            def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]:
                if takes_args or v is None:
                    return v

                raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given')

            @field_validator(self.v_kwargs_name, check_fields=False)
            @classmethod
            def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
                if takes_kwargs or v is None:
                    return v

                plural = '' if len(v) == 1 else 's'
                keys = ', '.join(map(repr, v.keys()))
                raise TypeError(f'unexpected keyword argument{plural}: {keys}')

            @field_validator(V_POSITIONAL_ONLY_NAME, check_fields=False)
            @classmethod
            def check_positional_only(cls, v: Optional[List[str]]) -> None:
                if v is None:
                    return

                plural = '' if len(v) == 1 else 's'
                keys = ', '.join(map(repr, v))
                raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}')

            @field_validator(V_DUPLICATE_KWARGS, check_fields=False)
            @classmethod
            def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None:
                if v is None:
                    return

                plural = '' if len(v) == 1 else 's'
                keys = ', '.join(map(repr, v))
                raise TypeError(f'multiple values for argument{plural}: {keys}')

            model_config = config_wrapper.config_dict

        self.model = create_model(to_pascal(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/deprecated/json.py b/.venv/lib/python3.12/site-packages/pydantic/deprecated/json.py
new file mode 100644
index 00000000..d031aa5b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/deprecated/json.py
@@ -0,0 +1,141 @@
+import datetime
+import warnings
+from collections import deque
+from decimal import Decimal
+from enum import Enum
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from pathlib import Path
+from re import Pattern
+from types import GeneratorType
+from typing import TYPE_CHECKING, Any, Callable, Dict, Type, Union
+from uuid import UUID
+
+from typing_extensions import deprecated
+
+from .._internal._import_utils import import_cached_base_model
+from ..color import Color
+from ..networks import NameEmail
+from ..types import SecretBytes, SecretStr
+from ..warnings import PydanticDeprecatedSince20
+
+if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
+    DeprecationWarning = PydanticDeprecatedSince20
+
+__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat'
+
+
def isoformat(o: Union[datetime.date, datetime.time]) -> str:
    """Serialize a `date`, `datetime` or `time` to its ISO 8601 string form."""
    return o.isoformat()
+
+
def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
    """Encodes a Decimal as int if there's no exponent, otherwise float.

    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
    where an integer (but not int typed) is used. Encoding this as a float
    results in failed round-tripping between encode and parse.
    Our Id type is a prime example of this.

    >>> decimal_encoder(Decimal("1.0"))
    1.0

    >>> decimal_encoder(Decimal("1"))
    1
    """
    exponent = dec_value.as_tuple().exponent
    # For NaN/Infinity the exponent is a letter ('n'/'N'/'F'), not an int,
    # so the isinstance check routes those through float() as well.
    if isinstance(exponent, int) and exponent >= 0:
        return int(dec_value)
    else:
        return float(dec_value)
+
+
# Default encoders used by the deprecated `pydantic_encoder`, keyed by exact
# type; `pydantic_encoder` walks an object's MRO to find the first match, so
# e.g. the `Enum` entry also covers every Enum subclass.
ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
    bytes: lambda o: o.decode(),
    Color: str,
    datetime.date: isoformat,
    datetime.datetime: isoformat,
    datetime.time: isoformat,
    datetime.timedelta: lambda td: td.total_seconds(),
    Decimal: decimal_encoder,
    Enum: lambda o: o.value,
    frozenset: list,
    deque: list,
    GeneratorType: list,
    IPv4Address: str,
    IPv4Interface: str,
    IPv4Network: str,
    IPv6Address: str,
    IPv6Interface: str,
    IPv6Network: str,
    NameEmail: str,
    Path: str,
    Pattern: lambda o: o.pattern,
    SecretBytes: str,
    SecretStr: str,
    set: list,
    UUID: str,
}
+
+
@deprecated(
    '`pydantic_encoder` is deprecated, use `pydantic_core.to_jsonable_python` instead.',
    category=None,
)
def pydantic_encoder(obj: Any) -> Any:
    """`default` hook for `json.dumps`: handles pydantic models, dataclasses and the types in `ENCODERS_BY_TYPE`."""
    warnings.warn(
        '`pydantic_encoder` is deprecated, use `pydantic_core.to_jsonable_python` instead.',
        category=PydanticDeprecatedSince20,
        stacklevel=2,
    )
    from dataclasses import asdict, is_dataclass

    BaseModel = import_cached_base_model()

    if isinstance(obj, BaseModel):
        return obj.model_dump()
    if is_dataclass(obj):
        return asdict(obj)  # type: ignore

    # Walk the MRO (dropping the trailing `object`) to find a registered encoder.
    for klass in obj.__class__.__mro__[:-1]:
        if klass in ENCODERS_BY_TYPE:
            return ENCODERS_BY_TYPE[klass](obj)

    raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable")
+
+
# TODO: Add a suggested migration path once there is a way to use custom encoders
@deprecated(
    '`custom_pydantic_encoder` is deprecated, use `BaseModel.model_dump` instead.',
    category=None,
)
def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any:
    """Like `pydantic_encoder` but consults the user-supplied `type_encoders` first, by MRO."""
    warnings.warn(
        '`custom_pydantic_encoder` is deprecated, use `BaseModel.model_dump` instead.',
        category=PydanticDeprecatedSince20,
        stacklevel=2,
    )
    # Walk the MRO (dropping the trailing `object`) to find a matching custom encoder.
    for klass in obj.__class__.__mro__[:-1]:
        if klass in type_encoders:
            return type_encoders[klass](obj)

    # No custom encoder matched: fall back to the default deprecated encoder.
    return pydantic_encoder(obj)
+
+
@deprecated('`timedelta_isoformat` is deprecated.', category=None)
def timedelta_isoformat(td: datetime.timedelta) -> str:
    """ISO 8601 encoding for Python timedelta object."""
    warnings.warn('`timedelta_isoformat` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2)
    hours, remainder = divmod(td.seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    sign = '-' if td.days < 0 else ''
    return f'{sign}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S'
diff --git a/.venv/lib/python3.12/site-packages/pydantic/deprecated/parse.py b/.venv/lib/python3.12/site-packages/pydantic/deprecated/parse.py
new file mode 100644
index 00000000..2a92e62b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/deprecated/parse.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+import json
+import pickle
+import warnings
+from enum import Enum
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable
+
+from typing_extensions import deprecated
+
+from ..warnings import PydanticDeprecatedSince20
+
+if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
+    DeprecationWarning = PydanticDeprecatedSince20
+
+
class Protocol(str, Enum):
    """Serialization protocols accepted by the deprecated `load_str_bytes`/`load_file` helpers."""

    json = 'json'
    pickle = 'pickle'
+
+
@deprecated('`load_str_bytes` is deprecated.', category=None)
def load_str_bytes(
    b: str | bytes,
    *,
    content_type: str | None = None,
    encoding: str = 'utf8',
    proto: Protocol | None = None,
    allow_pickle: bool = False,
    json_loads: Callable[[str], Any] = json.loads,
) -> Any:
    """Deserialize `b` as JSON or (when `allow_pickle` is set) pickle.

    The protocol comes from `proto`, is otherwise inferred from `content_type`,
    and defaults to JSON.
    """
    warnings.warn('`load_str_bytes` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2)
    if proto is None and content_type:
        if allow_pickle and content_type.endswith('pickle'):
            proto = Protocol.pickle
        elif not content_type.endswith(('json', 'javascript')):
            # Neither a JSON-ish nor an allowed pickle content type.
            raise TypeError(f'Unknown content-type: {content_type}')

    proto = proto or Protocol.json

    if proto == Protocol.json:
        text = b.decode(encoding) if isinstance(b, bytes) else b
        return json_loads(text)  # type: ignore
    if proto == Protocol.pickle:
        if not allow_pickle:
            raise RuntimeError('Trying to decode with pickle with allow_pickle=False')
        raw = b.encode() if isinstance(b, str) else b  # type: ignore
        return pickle.loads(raw)
    raise TypeError(f'Unknown protocol: {proto}')
+
+
@deprecated('`load_file` is deprecated.', category=None)
def load_file(
    path: str | Path,
    *,
    content_type: str | None = None,
    encoding: str = 'utf8',
    proto: Protocol | None = None,
    allow_pickle: bool = False,
    json_loads: Callable[[str], Any] = json.loads,
) -> Any:
    """Read `path` and deserialize its contents via `load_str_bytes`.

    When no `content_type` is given, the protocol is inferred from the file
    suffix (`.js`/`.json` -> JSON, `.pkl` -> pickle).
    """
    warnings.warn('`load_file` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2)
    file_path = Path(path)
    raw = file_path.read_bytes()
    if content_type is None:
        suffix = file_path.suffix
        if suffix in ('.js', '.json'):
            proto = Protocol.json
        elif suffix == '.pkl':
            proto = Protocol.pickle

    return load_str_bytes(
        raw, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads
    )
diff --git a/.venv/lib/python3.12/site-packages/pydantic/deprecated/tools.py b/.venv/lib/python3.12/site-packages/pydantic/deprecated/tools.py
new file mode 100644
index 00000000..b04eae40
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/deprecated/tools.py
@@ -0,0 +1,103 @@
+from __future__ import annotations
+
+import json
+import warnings
+from typing import TYPE_CHECKING, Any, Callable, Type, TypeVar, Union
+
+from typing_extensions import deprecated
+
+from ..json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema
+from ..type_adapter import TypeAdapter
+from ..warnings import PydanticDeprecatedSince20
+
+if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
+    DeprecationWarning = PydanticDeprecatedSince20
+
__all__ = 'parse_obj_as', 'schema_of', 'schema_json_of'

# Either a literal title string or a callable deriving one from the target type.
NameFactory = Union[str, Callable[[Type[Any]], str]]


T = TypeVar('T')
+
+
@deprecated(
    '`parse_obj_as` is deprecated. Use `pydantic.TypeAdapter.validate_python` instead.',
    category=None,
)
def parse_obj_as(type_: type[T], obj: Any, type_name: NameFactory | None = None) -> T:
    """Validate `obj` against `type_` via a `TypeAdapter` (deprecated V1-style shim)."""
    warnings.warn(
        '`parse_obj_as` is deprecated. Use `pydantic.TypeAdapter.validate_python` instead.',
        category=PydanticDeprecatedSince20,
        stacklevel=2,
    )
    if type_name is not None:  # pragma: no cover
        # `type_name` only mattered when temporary models were created in V1.
        warnings.warn(
            'The type_name parameter is deprecated. parse_obj_as no longer creates temporary models',
            DeprecationWarning,
            stacklevel=2,
        )
    adapter: TypeAdapter[T] = TypeAdapter(type_)
    return adapter.validate_python(obj)
+
+
@deprecated(
    '`schema_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.',
    category=None,
)
def schema_of(
    type_: Any,
    *,
    title: NameFactory | None = None,
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
) -> dict[str, Any]:
    """Generate a JSON schema (as dict) for the passed model or dynamically generated one."""
    warnings.warn(
        '`schema_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.',
        category=PydanticDeprecatedSince20,
        stacklevel=2,
    )
    schema = TypeAdapter(type_).json_schema(
        by_alias=by_alias,
        schema_generator=schema_generator,
        ref_template=ref_template,
    )
    if title is None:
        return schema
    if isinstance(title, str):
        schema['title'] = title
        return schema
    # Callable titles were a V1 feature; honor them one last time while warning.
    warnings.warn(
        'Passing a callable for the `title` parameter is deprecated and no longer supported',
        DeprecationWarning,
        stacklevel=2,
    )
    schema['title'] = title(type_)
    return schema
+
+
@deprecated(
    '`schema_json_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.',
    category=None,
)
def schema_json_of(
    type_: Any,
    *,
    title: NameFactory | None = None,
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    **dumps_kwargs: Any,
) -> str:
    """Generate a JSON schema (as JSON) for the passed model or dynamically generated one."""
    warnings.warn(
        '`schema_json_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.',
        category=PydanticDeprecatedSince20,
        stacklevel=2,
    )
    schema = schema_of(
        type_, title=title, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator
    )
    return json.dumps(schema, **dumps_kwargs)