path: root/.venv/lib/python3.12/site-packages/pydantic/v1
author     S. Solomon Darnell    2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell    2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/pydantic/v1
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download   gn-ai-master.tar.gz
two versions of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/pydantic/v1')
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/__init__.py             131
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/_hypothesis_plugin.py   391
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/annotated_types.py       72
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/class_validators.py     361
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/color.py                494
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/config.py               191
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/dataclasses.py          500
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/datetime_parse.py       248
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/decorator.py            264
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/env_settings.py         350
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/error_wrappers.py       161
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/errors.py               646
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/fields.py              1253
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/generics.py             400
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/json.py                 112
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/main.py                1107
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/mypy.py                 949
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/networks.py             747
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/parse.py                 66
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/py.typed                  0
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/schema.py              1163
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/tools.py                 92
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/types.py               1205
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/typing.py               608
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/utils.py                804
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/validators.py           768
-rw-r--r--  .venv/lib/python3.12/site-packages/pydantic/v1/version.py               38
27 files changed, 13121 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/__init__.py b/.venv/lib/python3.12/site-packages/pydantic/v1/__init__.py
new file mode 100644
index 00000000..6ad3f466
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/__init__.py
@@ -0,0 +1,131 @@
+# flake8: noqa
+from pydantic.v1 import dataclasses
+from pydantic.v1.annotated_types import create_model_from_namedtuple, create_model_from_typeddict
+from pydantic.v1.class_validators import root_validator, validator
+from pydantic.v1.config import BaseConfig, ConfigDict, Extra
+from pydantic.v1.decorator import validate_arguments
+from pydantic.v1.env_settings import BaseSettings
+from pydantic.v1.error_wrappers import ValidationError
+from pydantic.v1.errors import *
+from pydantic.v1.fields import Field, PrivateAttr, Required
+from pydantic.v1.main import *
+from pydantic.v1.networks import *
+from pydantic.v1.parse import Protocol
+from pydantic.v1.tools import *
+from pydantic.v1.types import *
+from pydantic.v1.version import VERSION, compiled
+
+__version__ = VERSION
+
+# WARNING __all__ from pydantic.errors is not included here, it will be removed as an export here in v2
+# please use "from pydantic.v1.errors import ..." instead
+__all__ = [
+    # annotated types utils
+    'create_model_from_namedtuple',
+    'create_model_from_typeddict',
+    # dataclasses
+    'dataclasses',
+    # class_validators
+    'root_validator',
+    'validator',
+    # config
+    'BaseConfig',
+    'ConfigDict',
+    'Extra',
+    # decorator
+    'validate_arguments',
+    # env_settings
+    'BaseSettings',
+    # error_wrappers
+    'ValidationError',
+    # fields
+    'Field',
+    'Required',
+    # main
+    'BaseModel',
+    'create_model',
+    'validate_model',
+    # network
+    'AnyUrl',
+    'AnyHttpUrl',
+    'FileUrl',
+    'HttpUrl',
+    'stricturl',
+    'EmailStr',
+    'NameEmail',
+    'IPvAnyAddress',
+    'IPvAnyInterface',
+    'IPvAnyNetwork',
+    'PostgresDsn',
+    'CockroachDsn',
+    'AmqpDsn',
+    'RedisDsn',
+    'MongoDsn',
+    'KafkaDsn',
+    'validate_email',
+    # parse
+    'Protocol',
+    # tools
+    'parse_file_as',
+    'parse_obj_as',
+    'parse_raw_as',
+    'schema_of',
+    'schema_json_of',
+    # types
+    'NoneStr',
+    'NoneBytes',
+    'StrBytes',
+    'NoneStrBytes',
+    'StrictStr',
+    'ConstrainedBytes',
+    'conbytes',
+    'ConstrainedList',
+    'conlist',
+    'ConstrainedSet',
+    'conset',
+    'ConstrainedFrozenSet',
+    'confrozenset',
+    'ConstrainedStr',
+    'constr',
+    'PyObject',
+    'ConstrainedInt',
+    'conint',
+    'PositiveInt',
+    'NegativeInt',
+    'NonNegativeInt',
+    'NonPositiveInt',
+    'ConstrainedFloat',
+    'confloat',
+    'PositiveFloat',
+    'NegativeFloat',
+    'NonNegativeFloat',
+    'NonPositiveFloat',
+    'FiniteFloat',
+    'ConstrainedDecimal',
+    'condecimal',
+    'ConstrainedDate',
+    'condate',
+    'UUID1',
+    'UUID3',
+    'UUID4',
+    'UUID5',
+    'FilePath',
+    'DirectoryPath',
+    'Json',
+    'JsonWrapper',
+    'SecretField',
+    'SecretStr',
+    'SecretBytes',
+    'StrictBool',
+    'StrictBytes',
+    'StrictInt',
+    'StrictFloat',
+    'PaymentCardNumber',
+    'PrivateAttr',
+    'ByteSize',
+    'PastDate',
+    'FutureDate',
+    # version
+    'compiled',
+    'VERSION',
+]
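
The __init__.py above re-exports the legacy v1 API under the pydantic.v1 namespace. A minimal usage sketch, assuming pydantic 2.x (which vendors this package) is installed; the Item model and its fields are invented for illustration:

    from pydantic.v1 import BaseModel, Field, ValidationError, validator

    class Item(BaseModel):
        name: str = Field(..., min_length=1)
        price: float

        @validator('price')
        def price_must_be_positive(cls, v):
            if v <= 0:
                raise ValueError('price must be positive')
            return v

    try:
        Item(name='', price=-1)          # both fields fail validation
    except ValidationError as exc:
        print(exc)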
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/_hypothesis_plugin.py b/.venv/lib/python3.12/site-packages/pydantic/v1/_hypothesis_plugin.py
new file mode 100644
index 00000000..b62234d5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/_hypothesis_plugin.py
@@ -0,0 +1,391 @@
+"""
+Register Hypothesis strategies for Pydantic custom types.
+
+This enables fully-automatic generation of test data for most Pydantic classes.
+
+Note that this module has *no* runtime impact on Pydantic itself; instead it
+is registered as a setuptools entry point and Hypothesis will import it if
+Pydantic is installed.  See also:
+
+https://hypothesis.readthedocs.io/en/latest/strategies.html#registering-strategies-via-setuptools-entry-points
+https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.register_type_strategy
+https://hypothesis.readthedocs.io/en/latest/strategies.html#interaction-with-pytest-cov
+https://docs.pydantic.dev/usage/types/#pydantic-types
+
+Note that because our motivation is to *improve user experience*, the strategies
+are always sound (never generate invalid data) but sacrifice completeness for
+maintainability (ie may be unable to generate some tricky but valid data).
+
+Finally, this module makes liberal use of `# type: ignore[<code>]` pragmas.
+This is because Hypothesis annotates `register_type_strategy()` with
+`(T, SearchStrategy[T])`, but in most cases we register e.g. `ConstrainedInt`
+to generate instances of the builtin `int` type which match the constraints.
+"""
+
+import contextlib
+import datetime
+import ipaddress
+import json
+import math
+from fractions import Fraction
+from typing import Callable, Dict, Type, Union, cast, overload
+
+import hypothesis.strategies as st
+
+import pydantic
+import pydantic.color
+import pydantic.types
+from pydantic.v1.utils import lenient_issubclass
+
+# FilePath and DirectoryPath are explicitly unsupported, as we'd have to create
+# them on-disk, and that's unsafe in general without being told *where* to do so.
+#
+# URLs are unsupported because it's easy for users to define their own strategy for
+# "normal" URLs, and hard for us to define a general strategy which includes "weird"
+# URLs but doesn't also have unpredictable performance problems.
+#
+# conlist() and conset() are unsupported for now, because the workarounds for
+# Cython and Hypothesis to handle parametrized generic types are incompatible.
+# We are rethinking Hypothesis compatibility in Pydantic v2.
+
+# Emails
+try:
+    import email_validator
+except ImportError:  # pragma: no cover
+    pass
+else:
+
+    def is_valid_email(s: str) -> bool:
+        # Hypothesis' st.emails() occasionally generates emails like 0@A0--0.ac
+        # that are invalid according to email-validator, so we filter those out.
+        try:
+            email_validator.validate_email(s, check_deliverability=False)
+            return True
+        except email_validator.EmailNotValidError:  # pragma: no cover
+            return False
+
+    # Note that these strategies deliberately stay away from any tricky Unicode
+    # or other encoding issues; we're just trying to generate *something* valid.
+    st.register_type_strategy(pydantic.EmailStr, st.emails().filter(is_valid_email))  # type: ignore[arg-type]
+    st.register_type_strategy(
+        pydantic.NameEmail,
+        st.builds(
+            '{} <{}>'.format,  # type: ignore[arg-type]
+            st.from_regex('[A-Za-z0-9_]+( [A-Za-z0-9_]+){0,5}', fullmatch=True),
+            st.emails().filter(is_valid_email),
+        ),
+    )
+
+# PyObject - dotted names, in this case taken from the math module.
+st.register_type_strategy(
+    pydantic.PyObject,  # type: ignore[arg-type]
+    st.sampled_from(
+        [cast(pydantic.PyObject, f'math.{name}') for name in sorted(vars(math)) if not name.startswith('_')]
+    ),
+)
+
+# CSS3 Colors; as name, hex, rgb(a) tuples or strings, or hsl strings
+_color_regexes = (
+    '|'.join(
+        (
+            pydantic.color.r_hex_short,
+            pydantic.color.r_hex_long,
+            pydantic.color.r_rgb,
+            pydantic.color.r_rgba,
+            pydantic.color.r_hsl,
+            pydantic.color.r_hsla,
+        )
+    )
+    # Use more precise regex patterns to avoid value-out-of-range errors
+    .replace(pydantic.color._r_sl, r'(?:(\d\d?(?:\.\d+)?|100(?:\.0+)?)%)')
+    .replace(pydantic.color._r_alpha, r'(?:(0(?:\.\d+)?|1(?:\.0+)?|\.\d+|\d{1,2}%))')
+    .replace(pydantic.color._r_255, r'(?:((?:\d|\d\d|[01]\d\d|2[0-4]\d|25[0-4])(?:\.\d+)?|255(?:\.0+)?))')
+)
+st.register_type_strategy(
+    pydantic.color.Color,
+    st.one_of(
+        st.sampled_from(sorted(pydantic.color.COLORS_BY_NAME)),
+        st.tuples(
+            st.integers(0, 255),
+            st.integers(0, 255),
+            st.integers(0, 255),
+            st.none() | st.floats(0, 1) | st.floats(0, 100).map('{}%'.format),
+        ),
+        st.from_regex(_color_regexes, fullmatch=True),
+    ),
+)
+
+
+# Card numbers, valid according to the Luhn algorithm
+
+
+def add_luhn_digit(card_number: str) -> str:
+    # See https://en.wikipedia.org/wiki/Luhn_algorithm
+    for digit in '0123456789':
+        with contextlib.suppress(Exception):
+            pydantic.PaymentCardNumber.validate_luhn_check_digit(card_number + digit)
+            return card_number + digit
+    raise AssertionError('Unreachable')  # pragma: no cover
+
+
+card_patterns = (
+    # Note that these patterns omit the Luhn check digit; that's added by the function above
+    '4[0-9]{14}',  # Visa
+    '5[12345][0-9]{13}',  # Mastercard
+    '3[47][0-9]{12}',  # American Express
+    '[0-26-9][0-9]{10,17}',  # other (incomplete to avoid overlap)
+)
+st.register_type_strategy(
+    pydantic.PaymentCardNumber,
+    st.from_regex('|'.join(card_patterns), fullmatch=True).map(add_luhn_digit),  # type: ignore[arg-type]
+)
+
+# UUIDs
+st.register_type_strategy(pydantic.UUID1, st.uuids(version=1))
+st.register_type_strategy(pydantic.UUID3, st.uuids(version=3))
+st.register_type_strategy(pydantic.UUID4, st.uuids(version=4))
+st.register_type_strategy(pydantic.UUID5, st.uuids(version=5))
+
+# Secrets
+st.register_type_strategy(pydantic.SecretBytes, st.binary().map(pydantic.SecretBytes))
+st.register_type_strategy(pydantic.SecretStr, st.text().map(pydantic.SecretStr))
+
+# IP addresses, networks, and interfaces
+st.register_type_strategy(pydantic.IPvAnyAddress, st.ip_addresses())  # type: ignore[arg-type]
+st.register_type_strategy(
+    pydantic.IPvAnyInterface,
+    st.from_type(ipaddress.IPv4Interface) | st.from_type(ipaddress.IPv6Interface),  # type: ignore[arg-type]
+)
+st.register_type_strategy(
+    pydantic.IPvAnyNetwork,
+    st.from_type(ipaddress.IPv4Network) | st.from_type(ipaddress.IPv6Network),  # type: ignore[arg-type]
+)
+
+# We hook into the con***() functions and the ConstrainedNumberMeta metaclass,
+# so here we only have to register subclasses for other constrained types which
+# don't go via those mechanisms.  Then there are the registration hooks below.
+st.register_type_strategy(pydantic.StrictBool, st.booleans())
+st.register_type_strategy(pydantic.StrictStr, st.text())
+
+
+# FutureDate, PastDate
+st.register_type_strategy(pydantic.FutureDate, st.dates(min_value=datetime.date.today() + datetime.timedelta(days=1)))
+st.register_type_strategy(pydantic.PastDate, st.dates(max_value=datetime.date.today() - datetime.timedelta(days=1)))
+
+
+# Constrained-type resolver functions
+#
+# For these ones, we actually want to inspect the type in order to work out a
+# satisfying strategy.  First up, the machinery for tracking resolver functions:
+
+RESOLVERS: Dict[type, Callable[[type], st.SearchStrategy]] = {}  # type: ignore[type-arg]
+
+
+@overload
+def _registered(typ: Type[pydantic.types.T]) -> Type[pydantic.types.T]:
+    pass
+
+
+@overload
+def _registered(typ: pydantic.types.ConstrainedNumberMeta) -> pydantic.types.ConstrainedNumberMeta:
+    pass
+
+
+def _registered(
+    typ: Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]
+) -> Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]:
+    # This function replaces the version in `pydantic.types`, in order to
+    # effect the registration of new constrained types so that Hypothesis
+    # can generate valid examples.
+    pydantic.types._DEFINED_TYPES.add(typ)
+    for supertype, resolver in RESOLVERS.items():
+        if issubclass(typ, supertype):
+            st.register_type_strategy(typ, resolver(typ))  # type: ignore
+            return typ
+    raise NotImplementedError(f'Unknown type {typ!r} has no resolver to register')  # pragma: no cover
+
+
+def resolves(
+    typ: Union[type, pydantic.types.ConstrainedNumberMeta]
+) -> Callable[[Callable[..., st.SearchStrategy]], Callable[..., st.SearchStrategy]]:  # type: ignore[type-arg]
+    def inner(f):  # type: ignore
+        assert f not in RESOLVERS
+        RESOLVERS[typ] = f
+        return f
+
+    return inner
+
+
+# Type-to-strategy resolver functions
+
+
+@resolves(pydantic.JsonWrapper)
+def resolve_json(cls):  # type: ignore[no-untyped-def]
+    try:
+        inner = st.none() if cls.inner_type is None else st.from_type(cls.inner_type)
+    except Exception:  # pragma: no cover
+        finite = st.floats(allow_infinity=False, allow_nan=False)
+        inner = st.recursive(
+            base=st.one_of(st.none(), st.booleans(), st.integers(), finite, st.text()),
+            extend=lambda x: st.lists(x) | st.dictionaries(st.text(), x),  # type: ignore
+        )
+    inner_type = getattr(cls, 'inner_type', None)
+    return st.builds(
+        cls.inner_type.json if lenient_issubclass(inner_type, pydantic.BaseModel) else json.dumps,
+        inner,
+        ensure_ascii=st.booleans(),
+        indent=st.none() | st.integers(0, 16),
+        sort_keys=st.booleans(),
+    )
+
+
+@resolves(pydantic.ConstrainedBytes)
+def resolve_conbytes(cls):  # type: ignore[no-untyped-def]  # pragma: no cover
+    min_size = cls.min_length or 0
+    max_size = cls.max_length
+    if not cls.strip_whitespace:
+        return st.binary(min_size=min_size, max_size=max_size)
+    # Fun with regex to ensure we neither start nor end with whitespace
+    repeats = '{{{},{}}}'.format(
+        min_size - 2 if min_size > 2 else 0,
+        max_size - 2 if (max_size or 0) > 2 else '',
+    )
+    if min_size >= 2:
+        pattern = rf'\W.{repeats}\W'
+    elif min_size == 1:
+        pattern = rf'\W(.{repeats}\W)?'
+    else:
+        assert min_size == 0
+        pattern = rf'(\W(.{repeats}\W)?)?'
+    return st.from_regex(pattern.encode(), fullmatch=True)
+
+
+@resolves(pydantic.ConstrainedDecimal)
+def resolve_condecimal(cls):  # type: ignore[no-untyped-def]
+    min_value = cls.ge
+    max_value = cls.le
+    if cls.gt is not None:
+        assert min_value is None, 'Set `gt` or `ge`, but not both'
+        min_value = cls.gt
+    if cls.lt is not None:
+        assert max_value is None, 'Set `lt` or `le`, but not both'
+        max_value = cls.lt
+    s = st.decimals(min_value, max_value, allow_nan=False, places=cls.decimal_places)
+    if cls.lt is not None:
+        s = s.filter(lambda d: d < cls.lt)
+    if cls.gt is not None:
+        s = s.filter(lambda d: cls.gt < d)
+    return s
+
+
+@resolves(pydantic.ConstrainedFloat)
+def resolve_confloat(cls):  # type: ignore[no-untyped-def]
+    min_value = cls.ge
+    max_value = cls.le
+    exclude_min = False
+    exclude_max = False
+
+    if cls.gt is not None:
+        assert min_value is None, 'Set `gt` or `ge`, but not both'
+        min_value = cls.gt
+        exclude_min = True
+    if cls.lt is not None:
+        assert max_value is None, 'Set `lt` or `le`, but not both'
+        max_value = cls.lt
+        exclude_max = True
+
+    if cls.multiple_of is None:
+        return st.floats(min_value, max_value, exclude_min=exclude_min, exclude_max=exclude_max, allow_nan=False)
+
+    if min_value is not None:
+        min_value = math.ceil(min_value / cls.multiple_of)
+        if exclude_min:
+            min_value = min_value + 1
+    if max_value is not None:
+        assert max_value >= cls.multiple_of, 'Cannot build model with max value smaller than multiple of'
+        max_value = math.floor(max_value / cls.multiple_of)
+        if exclude_max:
+            max_value = max_value - 1
+
+    return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of)
+
+
+@resolves(pydantic.ConstrainedInt)
+def resolve_conint(cls):  # type: ignore[no-untyped-def]
+    min_value = cls.ge
+    max_value = cls.le
+    if cls.gt is not None:
+        assert min_value is None, 'Set `gt` or `ge`, but not both'
+        min_value = cls.gt + 1
+    if cls.lt is not None:
+        assert max_value is None, 'Set `lt` or `le`, but not both'
+        max_value = cls.lt - 1
+
+    if cls.multiple_of is None or cls.multiple_of == 1:
+        return st.integers(min_value, max_value)
+
+    # These adjustments and the .map handle integer-valued multiples, while the
+    # .filter handles trickier cases as for confloat.
+    if min_value is not None:
+        min_value = math.ceil(Fraction(min_value) / Fraction(cls.multiple_of))
+    if max_value is not None:
+        max_value = math.floor(Fraction(max_value) / Fraction(cls.multiple_of))
+    return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of)
+
+
+@resolves(pydantic.ConstrainedDate)
+def resolve_condate(cls):  # type: ignore[no-untyped-def]
+    if cls.ge is not None:
+        assert cls.gt is None, 'Set `gt` or `ge`, but not both'
+        min_value = cls.ge
+    elif cls.gt is not None:
+        min_value = cls.gt + datetime.timedelta(days=1)
+    else:
+        min_value = datetime.date.min
+    if cls.le is not None:
+        assert cls.lt is None, 'Set `lt` or `le`, but not both'
+        max_value = cls.le
+    elif cls.lt is not None:
+        max_value = cls.lt - datetime.timedelta(days=1)
+    else:
+        max_value = datetime.date.max
+    return st.dates(min_value, max_value)
+
+
+@resolves(pydantic.ConstrainedStr)
+def resolve_constr(cls):  # type: ignore[no-untyped-def]  # pragma: no cover
+    min_size = cls.min_length or 0
+    max_size = cls.max_length
+
+    if cls.regex is None and not cls.strip_whitespace:
+        return st.text(min_size=min_size, max_size=max_size)
+
+    if cls.regex is not None:
+        strategy = st.from_regex(cls.regex)
+        if cls.strip_whitespace:
+            strategy = strategy.filter(lambda s: s == s.strip())
+    elif cls.strip_whitespace:
+        repeats = '{{{},{}}}'.format(
+            min_size - 2 if min_size > 2 else 0,
+            max_size - 2 if (max_size or 0) > 2 else '',
+        )
+        if min_size >= 2:
+            strategy = st.from_regex(rf'\W.{repeats}\W')
+        elif min_size == 1:
+            strategy = st.from_regex(rf'\W(.{repeats}\W)?')
+        else:
+            assert min_size == 0
+            strategy = st.from_regex(rf'(\W(.{repeats}\W)?)?')
+
+    if min_size == 0 and max_size is None:
+        return strategy
+    elif max_size is None:
+        return strategy.filter(lambda s: min_size <= len(s))
+    return strategy.filter(lambda s: min_size <= len(s) <= max_size)
+
+
+# Finally, register all previously-defined types, and patch in our new function
+for typ in list(pydantic.types._DEFINED_TYPES):
+    _registered(typ)
+pydantic.types._registered = _registered
+st.register_type_strategy(pydantic.Json, resolve_json)
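
The constrained-type resolvers above translate field constraints into Hypothesis strategies; resolve_conint, for example, rescales the bounds by multiple_of and maps the generated integers back up. A small standalone sketch of that same arithmetic for a hypothetical conint(gt=0, le=100, multiple_of=5) field (no plugin registration involved):

    import math
    from fractions import Fraction
    import hypothesis.strategies as st

    # constraints assumed for illustration: gt=0, le=100, multiple_of=5
    min_value, max_value, multiple_of = 0 + 1, 100, 5      # gt=0 becomes ge=1
    min_value = math.ceil(Fraction(min_value) / Fraction(multiple_of))
    max_value = math.floor(Fraction(max_value) / Fraction(multiple_of))
    strategy = st.integers(min_value, max_value).map(lambda x: x * multiple_of)
    # strategy now yields 5, 10, ..., 100, mirroring resolve_conint above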
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/annotated_types.py b/.venv/lib/python3.12/site-packages/pydantic/v1/annotated_types.py
new file mode 100644
index 00000000..d9eaaafd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/annotated_types.py
@@ -0,0 +1,72 @@
+import sys
+from typing import TYPE_CHECKING, Any, Dict, FrozenSet, NamedTuple, Type
+
+from pydantic.v1.fields import Required
+from pydantic.v1.main import BaseModel, create_model
+from pydantic.v1.typing import is_typeddict, is_typeddict_special
+
+if TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+if sys.version_info < (3, 11):
+
+    def is_legacy_typeddict(typeddict_cls: Type['TypedDict']) -> bool:  # type: ignore[valid-type]
+        return is_typeddict(typeddict_cls) and type(typeddict_cls).__module__ == 'typing'
+
+else:
+
+    def is_legacy_typeddict(_: Any) -> Any:
+        return False
+
+
+def create_model_from_typeddict(
+    # Mypy bug: `Type[TypedDict]` is resolved as `Any` https://github.com/python/mypy/issues/11030
+    typeddict_cls: Type['TypedDict'],  # type: ignore[valid-type]
+    **kwargs: Any,
+) -> Type['BaseModel']:
+    """
+    Create a `BaseModel` based on the fields of a `TypedDict`.
+    Since `typing.TypedDict` in Python 3.8 does not store runtime information about optional keys,
+    we raise an error if this happens (see https://bugs.python.org/issue38834).
+    """
+    field_definitions: Dict[str, Any]
+
+    # Best case scenario: with python 3.9+ or when `TypedDict` is imported from `typing_extensions`
+    if not hasattr(typeddict_cls, '__required_keys__'):
+        raise TypeError(
+            'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. '
+            'Without it, there is no way to differentiate required and optional fields when subclassed.'
+        )
+
+    if is_legacy_typeddict(typeddict_cls) and any(
+        is_typeddict_special(t) for t in typeddict_cls.__annotations__.values()
+    ):
+        raise TypeError(
+            'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.11. '
+            'Without it, there is no way to reflect Required/NotRequired keys.'
+        )
+
+    required_keys: FrozenSet[str] = typeddict_cls.__required_keys__  # type: ignore[attr-defined]
+    field_definitions = {
+        field_name: (field_type, Required if field_name in required_keys else None)
+        for field_name, field_type in typeddict_cls.__annotations__.items()
+    }
+
+    return create_model(typeddict_cls.__name__, **kwargs, **field_definitions)
+
+
+def create_model_from_namedtuple(namedtuple_cls: Type['NamedTuple'], **kwargs: Any) -> Type['BaseModel']:
+    """
+    Create a `BaseModel` based on the fields of a named tuple.
+    A named tuple can be created with `typing.NamedTuple` and declared annotations
+    but also with `collections.namedtuple`, in this case we consider all fields
+    to have type `Any`.
+    """
+    # With python 3.10+, `__annotations__` always exists but can be empty hence the `getattr... or...` logic
+    namedtuple_annotations: Dict[str, Type[Any]] = getattr(namedtuple_cls, '__annotations__', None) or {
+        k: Any for k in namedtuple_cls._fields
+    }
+    field_definitions: Dict[str, Any] = {
+        field_name: (field_type, Required) for field_name, field_type in namedtuple_annotations.items()
+    }
+    return create_model(namedtuple_cls.__name__, **kwargs, **field_definitions)
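
create_model_from_typeddict, defined above, builds a BaseModel whose required fields follow the TypedDict's __required_keys__. A short sketch (the Movie TypedDict is invented for illustration; typing_extensions.TypedDict is used so key information is available at runtime on older Pythons):

    from typing_extensions import TypedDict
    from pydantic.v1 import ValidationError
    from pydantic.v1.annotated_types import create_model_from_typeddict

    class Movie(TypedDict):
        title: str
        year: int

    MovieModel = create_model_from_typeddict(Movie)
    print(MovieModel(title='Metropolis', year=1927))
    try:
        MovieModel(title='Metropolis')   # 'year' is a required key
    except ValidationError as exc:
        print(exc)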
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/class_validators.py b/.venv/lib/python3.12/site-packages/pydantic/v1/class_validators.py
new file mode 100644
index 00000000..2f68fc86
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/class_validators.py
@@ -0,0 +1,361 @@
+import warnings
+from collections import ChainMap
+from functools import partial, partialmethod, wraps
+from itertools import chain
+from types import FunctionType
+from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload
+
+from pydantic.v1.errors import ConfigError
+from pydantic.v1.typing import AnyCallable
+from pydantic.v1.utils import ROOT_KEY, in_ipython
+
+if TYPE_CHECKING:
+    from pydantic.v1.typing import AnyClassMethod
+
+
+class Validator:
+    __slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields', 'skip_on_failure'
+
+    def __init__(
+        self,
+        func: AnyCallable,
+        pre: bool = False,
+        each_item: bool = False,
+        always: bool = False,
+        check_fields: bool = False,
+        skip_on_failure: bool = False,
+    ):
+        self.func = func
+        self.pre = pre
+        self.each_item = each_item
+        self.always = always
+        self.check_fields = check_fields
+        self.skip_on_failure = skip_on_failure
+
+
+if TYPE_CHECKING:
+    from inspect import Signature
+
+    from pydantic.v1.config import BaseConfig
+    from pydantic.v1.fields import ModelField
+    from pydantic.v1.types import ModelOrDc
+
+    ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any]
+    ValidatorsList = List[ValidatorCallable]
+    ValidatorListDict = Dict[str, List[Validator]]
+
+_FUNCS: Set[str] = set()
+VALIDATOR_CONFIG_KEY = '__validator_config__'
+ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__'
+
+
+def validator(
+    *fields: str,
+    pre: bool = False,
+    each_item: bool = False,
+    always: bool = False,
+    check_fields: bool = True,
+    whole: Optional[bool] = None,
+    allow_reuse: bool = False,
+) -> Callable[[AnyCallable], 'AnyClassMethod']:
+    """
+    Decorate methods on the class indicating that they should be used to validate fields
+    :param fields: which field(s) the method should be called on
+    :param pre: whether or not this validator should be called before the standard validators (else after)
+    :param each_item: for complex objects (sets, lists etc.) whether to validate individual elements rather than the
+      whole object
+    :param always: whether this method and other validators should be called even if the value is missing
+    :param check_fields: whether to check that the fields actually exist on the model
+    :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function
+    """
+    if not fields:
+        raise ConfigError('validator with no fields specified')
+    elif isinstance(fields[0], FunctionType):
+        raise ConfigError(
+            "validators should be used with fields and keyword arguments, not bare. "  # noqa: Q000
+            "E.g. usage should be `@validator('<field_name>', ...)`"
+        )
+    elif not all(isinstance(field, str) for field in fields):
+        raise ConfigError(
+            "validator fields should be passed as separate string args. "  # noqa: Q000
+            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`"
+        )
+
+    if whole is not None:
+        warnings.warn(
+            'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead',
+            DeprecationWarning,
+        )
+        assert each_item is False, '"each_item" and "whole" conflict, remove "whole"'
+        each_item = not whole
+
+    def dec(f: AnyCallable) -> 'AnyClassMethod':
+        f_cls = _prepare_validator(f, allow_reuse)
+        setattr(
+            f_cls,
+            VALIDATOR_CONFIG_KEY,
+            (
+                fields,
+                Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields),
+            ),
+        )
+        return f_cls
+
+    return dec
+
+
+@overload
+def root_validator(_func: AnyCallable) -> 'AnyClassMethod':
+    ...
+
+
+@overload
+def root_validator(
+    *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
+) -> Callable[[AnyCallable], 'AnyClassMethod']:
+    ...
+
+
+def root_validator(
+    _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
+) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]:
+    """
+    Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either
+    before or after standard model parsing/validation is performed.
+    """
+    if _func:
+        f_cls = _prepare_validator(_func, allow_reuse)
+        setattr(
+            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
+        )
+        return f_cls
+
+    def dec(f: AnyCallable) -> 'AnyClassMethod':
+        f_cls = _prepare_validator(f, allow_reuse)
+        setattr(
+            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
+        )
+        return f_cls
+
+    return dec
+
+
+def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMethod':
+    """
+    Avoid validators with duplicated names since without this, validators can be overwritten silently
+    which generally isn't the intended behaviour, don't run in ipython (see #312) or if allow_reuse is False.
+    """
+    f_cls = function if isinstance(function, classmethod) else classmethod(function)
+    if not in_ipython() and not allow_reuse:
+        ref = (
+            getattr(f_cls.__func__, '__module__', '<No __module__>')
+            + '.'
+            + getattr(f_cls.__func__, '__qualname__', f'<No __qualname__: id:{id(f_cls.__func__)}>')
+        )
+        if ref in _FUNCS:
+            raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`')
+        _FUNCS.add(ref)
+    return f_cls
+
+
+class ValidatorGroup:
+    def __init__(self, validators: 'ValidatorListDict') -> None:
+        self.validators = validators
+        self.used_validators = {'*'}
+
+    def get_validators(self, name: str) -> Optional[Dict[str, Validator]]:
+        self.used_validators.add(name)
+        validators = self.validators.get(name, [])
+        if name != ROOT_KEY:
+            validators += self.validators.get('*', [])
+        if validators:
+            return {getattr(v.func, '__name__', f'<No __name__: id:{id(v.func)}>'): v for v in validators}
+        else:
+            return None
+
+    def check_for_unused(self) -> None:
+        unused_validators = set(
+            chain.from_iterable(
+                (
+                    getattr(v.func, '__name__', f'<No __name__: id:{id(v.func)}>')
+                    for v in self.validators[f]
+                    if v.check_fields
+                )
+                for f in (self.validators.keys() - self.used_validators)
+            )
+        )
+        if unused_validators:
+            fn = ', '.join(unused_validators)
+            raise ConfigError(
+                f"Validators defined with incorrect fields: {fn} "  # noqa: Q000
+                f"(use check_fields=False if you're inheriting from the model and intended this)"
+            )
+
+
+def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]:
+    validators: Dict[str, List[Validator]] = {}
+    for var_name, value in namespace.items():
+        validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None)
+        if validator_config:
+            fields, v = validator_config
+            for field in fields:
+                if field in validators:
+                    validators[field].append(v)
+                else:
+                    validators[field] = [v]
+    return validators
+
+
+def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]:
+    from inspect import signature
+
+    pre_validators: List[AnyCallable] = []
+    post_validators: List[Tuple[bool, AnyCallable]] = []
+    for name, value in namespace.items():
+        validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None)
+        if validator_config:
+            sig = signature(validator_config.func)
+            args = list(sig.parameters.keys())
+            if args[0] == 'self':
+                raise ConfigError(
+                    f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, '
+                    f'should be: (cls, values).'
+                )
+            if len(args) != 2:
+                raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).')
+            # check function signature
+            if validator_config.pre:
+                pre_validators.append(validator_config.func)
+            else:
+                post_validators.append((validator_config.skip_on_failure, validator_config.func))
+    return pre_validators, post_validators
+
+
+def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict':
+    for field, field_validators in base_validators.items():
+        if field not in validators:
+            validators[field] = []
+        validators[field] += field_validators
+    return validators
+
+
+def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':
+    """
+    Make a generic function which calls a validator with the right arguments.
+
+    Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow,
+    hence this laborious way of doing things.
+
+    It's done like this so validators don't all need **kwargs in their signature, eg. any combination of
+    the arguments "values", "fields" and/or "config" are permitted.
+    """
+    from inspect import signature
+
+    if not isinstance(validator, (partial, partialmethod)):
+        # This should be the default case, so overhead is reduced
+        sig = signature(validator)
+        args = list(sig.parameters.keys())
+    else:
+        # Fix the generated argument lists of partial methods
+        sig = signature(validator.func)
+        args = [
+            k
+            for k in signature(validator.func).parameters.keys()
+            if k not in validator.args | validator.keywords.keys()
+        ]
+
+    first_arg = args.pop(0)
+    if first_arg == 'self':
+        raise ConfigError(
+            f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, '
+            f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.'
+        )
+    elif first_arg == 'cls':
+        # assume the second argument is value
+        return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:])))
+    else:
+        # assume the first argument was value which has already been removed
+        return wraps(validator)(_generic_validator_basic(validator, sig, set(args)))
+
+
+def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList':
+    return [make_generic_validator(f) for f in v_funcs if f]
+
+
+all_kwargs = {'values', 'field', 'config'}
+
+
+def _generic_validator_cls(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable':
+    # assume the first argument is value
+    has_kwargs = False
+    if 'kwargs' in args:
+        has_kwargs = True
+        args -= {'kwargs'}
+
+    if not args.issubset(all_kwargs):
+        raise ConfigError(
+            f'Invalid signature for validator {validator}: {sig}, should be: '
+            f'(cls, value, values, config, field), "values", "config" and "field" are all optional.'
+        )
+
+    if has_kwargs:
+        return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config)
+    elif args == set():
+        return lambda cls, v, values, field, config: validator(cls, v)
+    elif args == {'values'}:
+        return lambda cls, v, values, field, config: validator(cls, v, values=values)
+    elif args == {'field'}:
+        return lambda cls, v, values, field, config: validator(cls, v, field=field)
+    elif args == {'config'}:
+        return lambda cls, v, values, field, config: validator(cls, v, config=config)
+    elif args == {'values', 'field'}:
+        return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field)
+    elif args == {'values', 'config'}:
+        return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config)
+    elif args == {'field', 'config'}:
+        return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config)
+    else:
+        # args == {'values', 'field', 'config'}
+        return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config)
+
+
+def _generic_validator_basic(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable':
+    has_kwargs = False
+    if 'kwargs' in args:
+        has_kwargs = True
+        args -= {'kwargs'}
+
+    if not args.issubset(all_kwargs):
+        raise ConfigError(
+            f'Invalid signature for validator {validator}: {sig}, should be: '
+            f'(value, values, config, field), "values", "config" and "field" are all optional.'
+        )
+
+    if has_kwargs:
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
+    elif args == set():
+        return lambda cls, v, values, field, config: validator(v)
+    elif args == {'values'}:
+        return lambda cls, v, values, field, config: validator(v, values=values)
+    elif args == {'field'}:
+        return lambda cls, v, values, field, config: validator(v, field=field)
+    elif args == {'config'}:
+        return lambda cls, v, values, field, config: validator(v, config=config)
+    elif args == {'values', 'field'}:
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field)
+    elif args == {'values', 'config'}:
+        return lambda cls, v, values, field, config: validator(v, values=values, config=config)
+    elif args == {'field', 'config'}:
+        return lambda cls, v, values, field, config: validator(v, field=field, config=config)
+    else:
+        # args == {'values', 'field', 'config'}
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
+
+
+def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, 'AnyClassMethod']:
+    all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__])  # type: ignore[arg-type,var-annotated]
+    return {
+        k: v
+        for k, v in all_attributes.items()
+        if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY)
+    }
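
validator and root_validator, defined above, are normally applied directly on a model class. A minimal sketch of both decorators (UserModel and its checks are illustrative):

    from pydantic.v1 import BaseModel, ValidationError, root_validator, validator

    class UserModel(BaseModel):
        name: str
        password1: str
        password2: str

        @validator('name')
        def name_must_not_be_blank(cls, v):
            if not v.strip():
                raise ValueError('name may not be blank')
            return v.title()

        @root_validator
        def passwords_match(cls, values):
            if values.get('password1') != values.get('password2'):
                raise ValueError('passwords do not match')
            return values

    try:
        UserModel(name='ada lovelace', password1='x', password2='y')
    except ValidationError as exc:
        print(exc)   # reports the root-validator failure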
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/color.py b/.venv/lib/python3.12/site-packages/pydantic/v1/color.py
new file mode 100644
index 00000000..b0bbf78f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/color.py
@@ -0,0 +1,494 @@
+"""
+Color definitions are  used as per CSS3 specification:
+http://www.w3.org/TR/css3-color/#svg-color
+
+A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`.
+
+In these cases the LAST color when sorted alphabetically takes preferences,
+eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua".
+"""
+import math
+import re
+from colorsys import hls_to_rgb, rgb_to_hls
+from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast
+
+from pydantic.v1.errors import ColorError
+from pydantic.v1.utils import Representation, almost_equal_floats
+
+if TYPE_CHECKING:
+    from pydantic.v1.typing import CallableGenerator, ReprArgs
+
+ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
+ColorType = Union[ColorTuple, str]
+HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]
+
+
+class RGBA:
+    """
+    Internal use only as a representation of a color.
+    """
+
+    __slots__ = 'r', 'g', 'b', 'alpha', '_tuple'
+
+    def __init__(self, r: float, g: float, b: float, alpha: Optional[float]):
+        self.r = r
+        self.g = g
+        self.b = b
+        self.alpha = alpha
+
+        self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha)
+
+    def __getitem__(self, item: Any) -> Any:
+        return self._tuple[item]
+
+
+# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached
+r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*'
+r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*'
+_r_255 = r'(\d{1,3}(?:\.\d+)?)'
+_r_comma = r'\s*,\s*'
+r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*'
+_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)'
+r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*'
+_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
+_r_sl = r'(\d{1,3}(?:\.\d+)?)%'
+r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*'
+r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*'
+
+# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used
+repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'}
+rads = 2 * math.pi
+
+
+class Color(Representation):
+    __slots__ = '_original', '_rgba'
+
+    def __init__(self, value: ColorType) -> None:
+        self._rgba: RGBA
+        self._original: ColorType
+        if isinstance(value, (tuple, list)):
+            self._rgba = parse_tuple(value)
+        elif isinstance(value, str):
+            self._rgba = parse_str(value)
+        elif isinstance(value, Color):
+            self._rgba = value._rgba
+            value = value._original
+        else:
+            raise ColorError(reason='value must be a tuple, list or string')
+
+        # if we've got here value must be a valid color
+        self._original = value
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        field_schema.update(type='string', format='color')
+
+    def original(self) -> ColorType:
+        """
+        Original value passed to Color
+        """
+        return self._original
+
+    def as_named(self, *, fallback: bool = False) -> str:
+        if self._rgba.alpha is None:
+            rgb = cast(Tuple[int, int, int], self.as_rgb_tuple())
+            try:
+                return COLORS_BY_VALUE[rgb]
+            except KeyError as e:
+                if fallback:
+                    return self.as_hex()
+                else:
+                    raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e
+        else:
+            return self.as_hex()
+
+    def as_hex(self) -> str:
+        """
+        Hex string representing the color can be 3, 4, 6 or 8 characters depending on whether the string
+        a "short" representation of the color is possible and whether there's an alpha channel.
+        """
+        values = [float_to_255(c) for c in self._rgba[:3]]
+        if self._rgba.alpha is not None:
+            values.append(float_to_255(self._rgba.alpha))
+
+        as_hex = ''.join(f'{v:02x}' for v in values)
+        if all(c in repeat_colors for c in values):
+            as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2))
+        return '#' + as_hex
+
+    def as_rgb(self) -> str:
+        """
+        Color as an rgb(<r>, <g>, <b>) or rgba(<r>, <g>, <b>, <a>) string.
+        """
+        if self._rgba.alpha is None:
+            return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})'
+        else:
+            return (
+                f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, '
+                f'{round(self._alpha_float(), 2)})'
+            )
+
+    def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple:
+        """
+        Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is
+        in the range 0 to 1.
+
+        :param alpha: whether to include the alpha channel, options are
+          None - (default) include alpha only if it's set (e.g. not None)
+          True - always include alpha,
+          False - always omit alpha,
+        """
+        r, g, b = (float_to_255(c) for c in self._rgba[:3])
+        if alpha is None:
+            if self._rgba.alpha is None:
+                return r, g, b
+            else:
+                return r, g, b, self._alpha_float()
+        elif alpha:
+            return r, g, b, self._alpha_float()
+        else:
+            # alpha is False
+            return r, g, b
+
+    def as_hsl(self) -> str:
+        """
+        Color as an hsl(<h>, <s>, <l>) or hsl(<h>, <s>, <l>, <a>) string.
+        """
+        if self._rgba.alpha is None:
+            h, s, li = self.as_hsl_tuple(alpha=False)  # type: ignore
+            return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})'
+        else:
+            h, s, li, a = self.as_hsl_tuple(alpha=True)  # type: ignore
+            return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})'
+
+    def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple:
+        """
+        Color as an HSL or HSLA tuple, e.g. hue, saturation, lightness and optionally alpha; all elements are in
+        the range 0 to 1.
+
+        NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys.
+
+        :param alpha: whether to include the alpha channel, options are
+          None - (default) include alpha only if it's set (e.g. not None)
+          True - always include alpha,
+          False - always omit alpha,
+        """
+        h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b)
+        if alpha is None:
+            if self._rgba.alpha is None:
+                return h, s, l
+            else:
+                return h, s, l, self._alpha_float()
+        if alpha:
+            return h, s, l, self._alpha_float()
+        else:
+            # alpha is False
+            return h, s, l
+
+    def _alpha_float(self) -> float:
+        return 1 if self._rgba.alpha is None else self._rgba.alpha
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls
+
+    def __str__(self) -> str:
+        return self.as_named(fallback=True)
+
+    def __repr_args__(self) -> 'ReprArgs':
+        return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())]  # type: ignore
+
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple()
+
+    def __hash__(self) -> int:
+        return hash(self.as_rgb_tuple())
+
+
+def parse_tuple(value: Tuple[Any, ...]) -> RGBA:
+    """
+    Parse a tuple or list as a color.
+    """
+    if len(value) == 3:
+        r, g, b = (parse_color_value(v) for v in value)
+        return RGBA(r, g, b, None)
+    elif len(value) == 4:
+        r, g, b = (parse_color_value(v) for v in value[:3])
+        return RGBA(r, g, b, parse_float_alpha(value[3]))
+    else:
+        raise ColorError(reason='tuples must have length 3 or 4')
+
+
+def parse_str(value: str) -> RGBA:
+    """
+    Parse a string to an RGBA tuple, trying the following formats (in this order):
+    * named color, see COLORS_BY_NAME below
+    * hex short eg. `<prefix>fff` (prefix can be `#`, `0x` or nothing)
+    * hex long eg. `<prefix>ffffff` (prefix can be `#`, `0x` or nothing)
+    * `rgb(<r>, <g>, <b>) `
+    * `rgba(<r>, <g>, <b>, <a>)`
+    """
+    value_lower = value.lower()
+    try:
+        r, g, b = COLORS_BY_NAME[value_lower]
+    except KeyError:
+        pass
+    else:
+        return ints_to_rgba(r, g, b, None)
+
+    m = re.fullmatch(r_hex_short, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v * 2, 16) for v in rgb)
+        if a:
+            alpha: Optional[float] = int(a * 2, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_hex_long, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v, 16) for v in rgb)
+        if a:
+            alpha = int(a, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_rgb, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups(), None)  # type: ignore
+
+    m = re.fullmatch(r_rgba, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups())  # type: ignore
+
+    m = re.fullmatch(r_hsl, value_lower)
+    if m:
+        h, h_units, s, l_ = m.groups()
+        return parse_hsl(h, h_units, s, l_)
+
+    m = re.fullmatch(r_hsla, value_lower)
+    if m:
+        h, h_units, s, l_, a = m.groups()
+        return parse_hsl(h, h_units, s, l_, parse_float_alpha(a))
+
+    raise ColorError(reason='string not recognised as a valid color')
+
+
+def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA:
+    return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))
+
+
+def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
+    """
+    Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number
+    in the range 0 to 1
+    """
+    try:
+        color = float(value)
+    except ValueError:
+        raise ColorError(reason='color values must be a valid number')
+    if 0 <= color <= max_val:
+        return color / max_val
+    else:
+        raise ColorError(reason=f'color values must be in the range 0 to {max_val}')
+
+
+def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
+    """
+    Parse a value checking it's a valid float in the range 0 to 1
+    """
+    if value is None:
+        return None
+    try:
+        if isinstance(value, str) and value.endswith('%'):
+            alpha = float(value[:-1]) / 100
+        else:
+            alpha = float(value)
+    except ValueError:
+        raise ColorError(reason='alpha values must be a valid float')
+
+    if almost_equal_floats(alpha, 1):
+        return None
+    elif 0 <= alpha <= 1:
+        return alpha
+    else:
+        raise ColorError(reason='alpha values must be in the range 0 to 1')
+
+
+def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA:
+    """
+    Parse raw hue, saturation, lightness and alpha values and convert to RGBA.
+    """
+    s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100)
+
+    h_value = float(h)
+    if h_units in {None, 'deg'}:
+        h_value = h_value % 360 / 360
+    elif h_units == 'rad':
+        h_value = h_value % rads / rads
+    else:
+        # turns
+        h_value = h_value % 1
+
+    r, g, b = hls_to_rgb(h_value, l_value, s_value)
+    return RGBA(r, g, b, alpha)
+
+
+def float_to_255(c: float) -> int:
+    return int(round(c * 255))
+
+
+COLORS_BY_NAME = {
+    'aliceblue': (240, 248, 255),
+    'antiquewhite': (250, 235, 215),
+    'aqua': (0, 255, 255),
+    'aquamarine': (127, 255, 212),
+    'azure': (240, 255, 255),
+    'beige': (245, 245, 220),
+    'bisque': (255, 228, 196),
+    'black': (0, 0, 0),
+    'blanchedalmond': (255, 235, 205),
+    'blue': (0, 0, 255),
+    'blueviolet': (138, 43, 226),
+    'brown': (165, 42, 42),
+    'burlywood': (222, 184, 135),
+    'cadetblue': (95, 158, 160),
+    'chartreuse': (127, 255, 0),
+    'chocolate': (210, 105, 30),
+    'coral': (255, 127, 80),
+    'cornflowerblue': (100, 149, 237),
+    'cornsilk': (255, 248, 220),
+    'crimson': (220, 20, 60),
+    'cyan': (0, 255, 255),
+    'darkblue': (0, 0, 139),
+    'darkcyan': (0, 139, 139),
+    'darkgoldenrod': (184, 134, 11),
+    'darkgray': (169, 169, 169),
+    'darkgreen': (0, 100, 0),
+    'darkgrey': (169, 169, 169),
+    'darkkhaki': (189, 183, 107),
+    'darkmagenta': (139, 0, 139),
+    'darkolivegreen': (85, 107, 47),
+    'darkorange': (255, 140, 0),
+    'darkorchid': (153, 50, 204),
+    'darkred': (139, 0, 0),
+    'darksalmon': (233, 150, 122),
+    'darkseagreen': (143, 188, 143),
+    'darkslateblue': (72, 61, 139),
+    'darkslategray': (47, 79, 79),
+    'darkslategrey': (47, 79, 79),
+    'darkturquoise': (0, 206, 209),
+    'darkviolet': (148, 0, 211),
+    'deeppink': (255, 20, 147),
+    'deepskyblue': (0, 191, 255),
+    'dimgray': (105, 105, 105),
+    'dimgrey': (105, 105, 105),
+    'dodgerblue': (30, 144, 255),
+    'firebrick': (178, 34, 34),
+    'floralwhite': (255, 250, 240),
+    'forestgreen': (34, 139, 34),
+    'fuchsia': (255, 0, 255),
+    'gainsboro': (220, 220, 220),
+    'ghostwhite': (248, 248, 255),
+    'gold': (255, 215, 0),
+    'goldenrod': (218, 165, 32),
+    'gray': (128, 128, 128),
+    'green': (0, 128, 0),
+    'greenyellow': (173, 255, 47),
+    'grey': (128, 128, 128),
+    'honeydew': (240, 255, 240),
+    'hotpink': (255, 105, 180),
+    'indianred': (205, 92, 92),
+    'indigo': (75, 0, 130),
+    'ivory': (255, 255, 240),
+    'khaki': (240, 230, 140),
+    'lavender': (230, 230, 250),
+    'lavenderblush': (255, 240, 245),
+    'lawngreen': (124, 252, 0),
+    'lemonchiffon': (255, 250, 205),
+    'lightblue': (173, 216, 230),
+    'lightcoral': (240, 128, 128),
+    'lightcyan': (224, 255, 255),
+    'lightgoldenrodyellow': (250, 250, 210),
+    'lightgray': (211, 211, 211),
+    'lightgreen': (144, 238, 144),
+    'lightgrey': (211, 211, 211),
+    'lightpink': (255, 182, 193),
+    'lightsalmon': (255, 160, 122),
+    'lightseagreen': (32, 178, 170),
+    'lightskyblue': (135, 206, 250),
+    'lightslategray': (119, 136, 153),
+    'lightslategrey': (119, 136, 153),
+    'lightsteelblue': (176, 196, 222),
+    'lightyellow': (255, 255, 224),
+    'lime': (0, 255, 0),
+    'limegreen': (50, 205, 50),
+    'linen': (250, 240, 230),
+    'magenta': (255, 0, 255),
+    'maroon': (128, 0, 0),
+    'mediumaquamarine': (102, 205, 170),
+    'mediumblue': (0, 0, 205),
+    'mediumorchid': (186, 85, 211),
+    'mediumpurple': (147, 112, 219),
+    'mediumseagreen': (60, 179, 113),
+    'mediumslateblue': (123, 104, 238),
+    'mediumspringgreen': (0, 250, 154),
+    'mediumturquoise': (72, 209, 204),
+    'mediumvioletred': (199, 21, 133),
+    'midnightblue': (25, 25, 112),
+    'mintcream': (245, 255, 250),
+    'mistyrose': (255, 228, 225),
+    'moccasin': (255, 228, 181),
+    'navajowhite': (255, 222, 173),
+    'navy': (0, 0, 128),
+    'oldlace': (253, 245, 230),
+    'olive': (128, 128, 0),
+    'olivedrab': (107, 142, 35),
+    'orange': (255, 165, 0),
+    'orangered': (255, 69, 0),
+    'orchid': (218, 112, 214),
+    'palegoldenrod': (238, 232, 170),
+    'palegreen': (152, 251, 152),
+    'paleturquoise': (175, 238, 238),
+    'palevioletred': (219, 112, 147),
+    'papayawhip': (255, 239, 213),
+    'peachpuff': (255, 218, 185),
+    'peru': (205, 133, 63),
+    'pink': (255, 192, 203),
+    'plum': (221, 160, 221),
+    'powderblue': (176, 224, 230),
+    'purple': (128, 0, 128),
+    'red': (255, 0, 0),
+    'rosybrown': (188, 143, 143),
+    'royalblue': (65, 105, 225),
+    'saddlebrown': (139, 69, 19),
+    'salmon': (250, 128, 114),
+    'sandybrown': (244, 164, 96),
+    'seagreen': (46, 139, 87),
+    'seashell': (255, 245, 238),
+    'sienna': (160, 82, 45),
+    'silver': (192, 192, 192),
+    'skyblue': (135, 206, 235),
+    'slateblue': (106, 90, 205),
+    'slategray': (112, 128, 144),
+    'slategrey': (112, 128, 144),
+    'snow': (255, 250, 250),
+    'springgreen': (0, 255, 127),
+    'steelblue': (70, 130, 180),
+    'tan': (210, 180, 140),
+    'teal': (0, 128, 128),
+    'thistle': (216, 191, 216),
+    'tomato': (255, 99, 71),
+    'turquoise': (64, 224, 208),
+    'violet': (238, 130, 238),
+    'wheat': (245, 222, 179),
+    'white': (255, 255, 255),
+    'whitesmoke': (245, 245, 245),
+    'yellow': (255, 255, 0),
+    'yellowgreen': (154, 205, 50),
+}
+
+COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()}
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/config.py b/.venv/lib/python3.12/site-packages/pydantic/v1/config.py
new file mode 100644
index 00000000..18f7c999
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/config.py
@@ -0,0 +1,191 @@
+import json
+from enum import Enum
+from typing import TYPE_CHECKING, Any, Callable, Dict, ForwardRef, Optional, Tuple, Type, Union
+
+from typing_extensions import Literal, Protocol
+
+from pydantic.v1.typing import AnyArgTCallable, AnyCallable
+from pydantic.v1.utils import GetterDict
+from pydantic.v1.version import compiled
+
+if TYPE_CHECKING:
+    from typing import overload
+
+    from pydantic.v1.fields import ModelField
+    from pydantic.v1.main import BaseModel
+
+    ConfigType = Type['BaseConfig']
+
+    class SchemaExtraCallable(Protocol):
+        @overload
+        def __call__(self, schema: Dict[str, Any]) -> None:
+            pass
+
+        @overload
+        def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None:
+            pass
+
+else:
+    SchemaExtraCallable = Callable[..., None]
+
+__all__ = 'BaseConfig', 'ConfigDict', 'get_config', 'Extra', 'inherit_config', 'prepare_config'
+
+
+class Extra(str, Enum):
+    allow = 'allow'
+    ignore = 'ignore'
+    forbid = 'forbid'
+
+
+# https://github.com/cython/cython/issues/4003
+# Fixed in Cython 3 and Pydantic v1 won't support Cython 3.
+# Pydantic v2 doesn't depend on Cython at all.
+if not compiled:
+    from typing_extensions import TypedDict
+
+    class ConfigDict(TypedDict, total=False):
+        title: Optional[str]
+        anystr_lower: bool
+        anystr_strip_whitespace: bool
+        min_anystr_length: int
+        max_anystr_length: Optional[int]
+        validate_all: bool
+        extra: Extra
+        allow_mutation: bool
+        frozen: bool
+        allow_population_by_field_name: bool
+        use_enum_values: bool
+        fields: Dict[str, Union[str, Dict[str, str]]]
+        validate_assignment: bool
+        error_msg_templates: Dict[str, str]
+        arbitrary_types_allowed: bool
+        orm_mode: bool
+        getter_dict: Type[GetterDict]
+        alias_generator: Optional[Callable[[str], str]]
+        keep_untouched: Tuple[type, ...]
+        schema_extra: Union[Dict[str, object], 'SchemaExtraCallable']
+        json_loads: Callable[[str], object]
+        json_dumps: AnyArgTCallable[str]
+        json_encoders: Dict[Type[object], AnyCallable]
+        underscore_attrs_are_private: bool
+        allow_inf_nan: bool
+        copy_on_model_validation: Literal['none', 'deep', 'shallow']
+        # whether dataclass `__post_init__` should be run before or after validation
+        post_init_call: Literal['before_validation', 'after_validation']
+
+else:
+    ConfigDict = dict  # type: ignore
+
+
+class BaseConfig:
+    title: Optional[str] = None
+    anystr_lower: bool = False
+    anystr_upper: bool = False
+    anystr_strip_whitespace: bool = False
+    min_anystr_length: int = 0
+    max_anystr_length: Optional[int] = None
+    validate_all: bool = False
+    extra: Extra = Extra.ignore
+    allow_mutation: bool = True
+    frozen: bool = False
+    allow_population_by_field_name: bool = False
+    use_enum_values: bool = False
+    fields: Dict[str, Union[str, Dict[str, str]]] = {}
+    validate_assignment: bool = False
+    error_msg_templates: Dict[str, str] = {}
+    arbitrary_types_allowed: bool = False
+    orm_mode: bool = False
+    getter_dict: Type[GetterDict] = GetterDict
+    alias_generator: Optional[Callable[[str], str]] = None
+    keep_untouched: Tuple[type, ...] = ()
+    schema_extra: Union[Dict[str, Any], 'SchemaExtraCallable'] = {}
+    json_loads: Callable[[str], Any] = json.loads
+    json_dumps: Callable[..., str] = json.dumps
+    json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable] = {}
+    underscore_attrs_are_private: bool = False
+    allow_inf_nan: bool = True
+
+    # whether inherited models as fields should be reconstructed as base model,
+    # and whether such a copy should be shallow or deep
+    copy_on_model_validation: Literal['none', 'deep', 'shallow'] = 'shallow'
+
+    # whether `Union` should check all allowed types before even trying to coerce
+    smart_union: bool = False
+    # whether dataclass `__post_init__` should be run before or after validation
+    post_init_call: Literal['before_validation', 'after_validation'] = 'before_validation'
+
+    @classmethod
+    def get_field_info(cls, name: str) -> Dict[str, Any]:
+        """
+        Get properties of FieldInfo from the `fields` property of the config class.
+        """
+
+        fields_value = cls.fields.get(name)
+
+        if isinstance(fields_value, str):
+            field_info: Dict[str, Any] = {'alias': fields_value}
+        elif isinstance(fields_value, dict):
+            field_info = fields_value
+        else:
+            field_info = {}
+
+        if 'alias' in field_info:
+            field_info.setdefault('alias_priority', 2)
+
+        if field_info.get('alias_priority', 0) <= 1 and cls.alias_generator:
+            alias = cls.alias_generator(name)
+            if not isinstance(alias, str):
+                raise TypeError(f'Config.alias_generator must return str, not {alias.__class__}')
+            field_info.update(alias=alias, alias_priority=1)
+        return field_info
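+
+    # Illustrative note (example values are hypothetical, not from pydantic): with
+    # `fields = {'a': 'A', 'b': {'alias': 'B'}}` and no alias_generator,
+    # `get_field_info('a')` would return `{'alias': 'A', 'alias_priority': 2}`.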
+
+    @classmethod
+    def prepare_field(cls, field: 'ModelField') -> None:
+        """
+        Optional hook to check or modify fields during model creation.
+        """
+        pass
+
+
+def get_config(config: Union[ConfigDict, Type[object], None]) -> Type[BaseConfig]:
+    if config is None:
+        return BaseConfig
+
+    else:
+        config_dict = (
+            config
+            if isinstance(config, dict)
+            else {k: getattr(config, k) for k in dir(config) if not k.startswith('__')}
+        )
+
+        class Config(BaseConfig):
+            ...
+
+        for k, v in config_dict.items():
+            setattr(Config, k, v)
+        return Config
+
+
+def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType', **namespace: Any) -> 'ConfigType':
+    if not self_config:
+        base_classes: Tuple['ConfigType', ...] = (parent_config,)
+    elif self_config == parent_config:
+        base_classes = (self_config,)
+    else:
+        base_classes = self_config, parent_config
+
+    namespace['json_encoders'] = {
+        **getattr(parent_config, 'json_encoders', {}),
+        **getattr(self_config, 'json_encoders', {}),
+        **namespace.get('json_encoders', {}),
+    }
+
+    return type('Config', base_classes, namespace)
+
+
+def prepare_config(config: Type[BaseConfig], cls_name: str) -> None:
+    if not isinstance(config.extra, Extra):
+        try:
+            config.extra = Extra(config.extra)
+        except ValueError:
+            raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"')
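+
+
+# A minimal usage sketch (illustrative only, not part of the module): both the dict
+# form and the class form below are normalised by `get_config` into a `BaseConfig`
+# subclass carrying the given attributes.
+#
+#     get_config({'allow_mutation': False})
+#     get_config(type('Cfg', (), {'arbitrary_types_allowed': True}))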
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/dataclasses.py b/.venv/lib/python3.12/site-packages/pydantic/v1/dataclasses.py
new file mode 100644
index 00000000..bd167029
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/dataclasses.py
@@ -0,0 +1,500 @@
+"""
+The main purpose is to enhance stdlib dataclasses by adding validation
+A pydantic dataclass can be generated from scratch or from a stdlib one.
+
+Behind the scene, a pydantic dataclass is just like a regular one on which we attach
+a `BaseModel` and magic methods to trigger the validation of the data.
+`__init__` and `__post_init__` are hence overridden and have extra logic to be
+able to validate input data.
+
+When a pydantic dataclass is generated from scratch, it's just a plain dataclass
+with validation triggered at initialization
+
+The tricky part is for stdlib dataclasses that are converted into pydantic ones afterwards, e.g.
+
+```py
+@dataclasses.dataclass
+class M:
+    x: int
+
+ValidatedM = pydantic.dataclasses.dataclass(M)
+```
+
+We indeed still want to support equality, hashing, repr, ... as if it was the stdlib one!
+
+```py
+assert isinstance(ValidatedM(x=1), M)
+assert ValidatedM(x=1) == M(x=1)
+```
+
+This means we **don't want to create a new dataclass that inherits from it**
+The trick is to create a wrapper around `M` that will act as a proxy to trigger
+validation without altering default `M` behaviour.
+"""
+import copy
+import dataclasses
+import sys
+from contextlib import contextmanager
+from functools import wraps
+
+try:
+    from functools import cached_property
+except ImportError:
+    # cached_property available only for python3.8+
+    pass
+
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Optional, Type, TypeVar, Union, overload
+
+from typing_extensions import dataclass_transform
+
+from pydantic.v1.class_validators import gather_all_validators
+from pydantic.v1.config import BaseConfig, ConfigDict, Extra, get_config
+from pydantic.v1.error_wrappers import ValidationError
+from pydantic.v1.errors import DataclassTypeError
+from pydantic.v1.fields import Field, FieldInfo, Required, Undefined
+from pydantic.v1.main import create_model, validate_model
+from pydantic.v1.utils import ClassAttribute
+
+if TYPE_CHECKING:
+    from pydantic.v1.main import BaseModel
+    from pydantic.v1.typing import CallableGenerator, NoArgAnyCallable
+
+    DataclassT = TypeVar('DataclassT', bound='Dataclass')
+
+    DataclassClassOrWrapper = Union[Type['Dataclass'], 'DataclassProxy']
+
+    class Dataclass:
+        # stdlib attributes
+        __dataclass_fields__: ClassVar[Dict[str, Any]]
+        __dataclass_params__: ClassVar[Any]  # in reality `dataclasses._DataclassParams`
+        __post_init__: ClassVar[Callable[..., None]]
+
+        # Added by pydantic
+        __pydantic_run_validation__: ClassVar[bool]
+        __post_init_post_parse__: ClassVar[Callable[..., None]]
+        __pydantic_initialised__: ClassVar[bool]
+        __pydantic_model__: ClassVar[Type[BaseModel]]
+        __pydantic_validate_values__: ClassVar[Callable[['Dataclass'], None]]
+        __pydantic_has_field_info_default__: ClassVar[bool]  # whether a `pydantic.Field` is used as default value
+
+        def __init__(self, *args: object, **kwargs: object) -> None:
+            pass
+
+        @classmethod
+        def __get_validators__(cls: Type['Dataclass']) -> 'CallableGenerator':
+            pass
+
+        @classmethod
+        def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
+            pass
+
+
+__all__ = [
+    'dataclass',
+    'set_validation',
+    'create_pydantic_model_from_dataclass',
+    'is_builtin_dataclass',
+    'make_dataclass_validator',
+]
+
+_T = TypeVar('_T')
+
+if sys.version_info >= (3, 10):
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
+    @overload
+    def dataclass(
+        *,
+        init: bool = True,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: Union[ConfigDict, Type[object], None] = None,
+        validate_on_init: Optional[bool] = None,
+        use_proxy: Optional[bool] = None,
+        kw_only: bool = ...,
+    ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']:
+        ...
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
+    @overload
+    def dataclass(
+        _cls: Type[_T],
+        *,
+        init: bool = True,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: Union[ConfigDict, Type[object], None] = None,
+        validate_on_init: Optional[bool] = None,
+        use_proxy: Optional[bool] = None,
+        kw_only: bool = ...,
+    ) -> 'DataclassClassOrWrapper':
+        ...
+
+else:
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
+    @overload
+    def dataclass(
+        *,
+        init: bool = True,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: Union[ConfigDict, Type[object], None] = None,
+        validate_on_init: Optional[bool] = None,
+        use_proxy: Optional[bool] = None,
+    ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']:
+        ...
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
+    @overload
+    def dataclass(
+        _cls: Type[_T],
+        *,
+        init: bool = True,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: Union[ConfigDict, Type[object], None] = None,
+        validate_on_init: Optional[bool] = None,
+        use_proxy: Optional[bool] = None,
+    ) -> 'DataclassClassOrWrapper':
+        ...
+
+
+@dataclass_transform(field_specifiers=(dataclasses.field, Field))
+def dataclass(
+    _cls: Optional[Type[_T]] = None,
+    *,
+    init: bool = True,
+    repr: bool = True,
+    eq: bool = True,
+    order: bool = False,
+    unsafe_hash: bool = False,
+    frozen: bool = False,
+    config: Union[ConfigDict, Type[object], None] = None,
+    validate_on_init: Optional[bool] = None,
+    use_proxy: Optional[bool] = None,
+    kw_only: bool = False,
+) -> Union[Callable[[Type[_T]], 'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']:
+    """
+    Like the python standard lib dataclasses but with type validation.
+    The result is either a pydantic dataclass that will validate input data
+    or a wrapper that will trigger validation around a stdlib dataclass
+    to avoid modifying it directly
+    """
+    the_config = get_config(config)
+
+    def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper':
+        should_use_proxy = (
+            use_proxy
+            if use_proxy is not None
+            else (
+                is_builtin_dataclass(cls)
+                and (cls.__bases__[0] is object or set(dir(cls)) == set(dir(cls.__bases__[0])))
+            )
+        )
+        if should_use_proxy:
+            dc_cls_doc = ''
+            dc_cls = DataclassProxy(cls)
+            default_validate_on_init = False
+        else:
+            dc_cls_doc = cls.__doc__ or ''  # needs to be done before generating dataclass
+            if sys.version_info >= (3, 10):
+                dc_cls = dataclasses.dataclass(
+                    cls,
+                    init=init,
+                    repr=repr,
+                    eq=eq,
+                    order=order,
+                    unsafe_hash=unsafe_hash,
+                    frozen=frozen,
+                    kw_only=kw_only,
+                )
+            else:
+                dc_cls = dataclasses.dataclass(  # type: ignore
+                    cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen
+                )
+            default_validate_on_init = True
+
+        should_validate_on_init = default_validate_on_init if validate_on_init is None else validate_on_init
+        _add_pydantic_validation_attributes(cls, the_config, should_validate_on_init, dc_cls_doc)
+        dc_cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls})
+        return dc_cls
+
+    if _cls is None:
+        return wrap
+
+    return wrap(_cls)
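+
+# Illustrative usage (the `User` class below is hypothetical): the decorator mirrors
+# stdlib `dataclasses.dataclass`, with input validation run on `__init__`.
+#
+#     @dataclass
+#     class User:
+#         id: int
+#
+#     User(id='42').id == 42   # '42' is coerced to int
+#     User(id='oops')          # raises pydantic.ValidationError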
+
+
+@contextmanager
+def set_validation(cls: Type['DataclassT'], value: bool) -> Generator[Type['DataclassT'], None, None]:
+    original_run_validation = cls.__pydantic_run_validation__
+    try:
+        cls.__pydantic_run_validation__ = value
+        yield cls
+    finally:
+        cls.__pydantic_run_validation__ = original_run_validation
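+
+# Sketch (assumes the hypothetical `User` dataclass above): `set_validation`
+# temporarily toggles validation for a pydantic dataclass class.
+#
+#     with set_validation(User, False):
+#         User(id='oops')   # no validation inside the block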
+
+
+class DataclassProxy:
+    __slots__ = '__dataclass__'
+
+    def __init__(self, dc_cls: Type['Dataclass']) -> None:
+        object.__setattr__(self, '__dataclass__', dc_cls)
+
+    def __call__(self, *args: Any, **kwargs: Any) -> Any:
+        with set_validation(self.__dataclass__, True):
+            return self.__dataclass__(*args, **kwargs)
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self.__dataclass__, name)
+
+    def __setattr__(self, __name: str, __value: Any) -> None:
+        return setattr(self.__dataclass__, __name, __value)
+
+    def __instancecheck__(self, instance: Any) -> bool:
+        return isinstance(instance, self.__dataclass__)
+
+    def __copy__(self) -> 'DataclassProxy':
+        return DataclassProxy(copy.copy(self.__dataclass__))
+
+    def __deepcopy__(self, memo: Any) -> 'DataclassProxy':
+        return DataclassProxy(copy.deepcopy(self.__dataclass__, memo))
+
+
+def _add_pydantic_validation_attributes(  # noqa: C901 (ignore complexity)
+    dc_cls: Type['Dataclass'],
+    config: Type[BaseConfig],
+    validate_on_init: bool,
+    dc_cls_doc: str,
+) -> None:
+    """
+    We need to replace the right method. If no `__post_init__` has been set in the stdlib dataclass,
+    it won't even exist (its code is generated on the fly by `dataclasses`).
+    By default, we run validation after `__init__`, or after `__post_init__` if one is defined.
+    """
+    init = dc_cls.__init__
+
+    @wraps(init)
+    def handle_extra_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
+        if config.extra == Extra.ignore:
+            init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__})
+
+        elif config.extra == Extra.allow:
+            for k, v in kwargs.items():
+                self.__dict__.setdefault(k, v)
+            init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__})
+
+        else:
+            init(self, *args, **kwargs)
+
+    if hasattr(dc_cls, '__post_init__'):
+        try:
+            post_init = dc_cls.__post_init__.__wrapped__  # type: ignore[attr-defined]
+        except AttributeError:
+            post_init = dc_cls.__post_init__
+
+        @wraps(post_init)
+        def new_post_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
+            if config.post_init_call == 'before_validation':
+                post_init(self, *args, **kwargs)
+
+            if self.__class__.__pydantic_run_validation__:
+                self.__pydantic_validate_values__()
+                if hasattr(self, '__post_init_post_parse__'):
+                    self.__post_init_post_parse__(*args, **kwargs)
+
+            if config.post_init_call == 'after_validation':
+                post_init(self, *args, **kwargs)
+
+        setattr(dc_cls, '__init__', handle_extra_init)
+        setattr(dc_cls, '__post_init__', new_post_init)
+
+    else:
+
+        @wraps(init)
+        def new_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
+            handle_extra_init(self, *args, **kwargs)
+
+            if self.__class__.__pydantic_run_validation__:
+                self.__pydantic_validate_values__()
+
+            if hasattr(self, '__post_init_post_parse__'):
+                # We need to find the initvars again. To do that we use `__dataclass_fields__` instead of
+                # the public method `dataclasses.fields`
+
+                # get all initvars and their default values
+                initvars_and_values: Dict[str, Any] = {}
+                for i, f in enumerate(self.__class__.__dataclass_fields__.values()):
+                    if f._field_type is dataclasses._FIELD_INITVAR:  # type: ignore[attr-defined]
+                        try:
+                            # set arg value by default
+                            initvars_and_values[f.name] = args[i]
+                        except IndexError:
+                            initvars_and_values[f.name] = kwargs.get(f.name, f.default)
+
+                self.__post_init_post_parse__(**initvars_and_values)
+
+        setattr(dc_cls, '__init__', new_init)
+
+    setattr(dc_cls, '__pydantic_run_validation__', ClassAttribute('__pydantic_run_validation__', validate_on_init))
+    setattr(dc_cls, '__pydantic_initialised__', False)
+    setattr(dc_cls, '__pydantic_model__', create_pydantic_model_from_dataclass(dc_cls, config, dc_cls_doc))
+    setattr(dc_cls, '__pydantic_validate_values__', _dataclass_validate_values)
+    setattr(dc_cls, '__validate__', classmethod(_validate_dataclass))
+    setattr(dc_cls, '__get_validators__', classmethod(_get_validators))
+
+    if dc_cls.__pydantic_model__.__config__.validate_assignment and not dc_cls.__dataclass_params__.frozen:
+        setattr(dc_cls, '__setattr__', _dataclass_validate_assignment_setattr)
+
+
+def _get_validators(cls: 'DataclassClassOrWrapper') -> 'CallableGenerator':
+    yield cls.__validate__
+
+
+def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
+    with set_validation(cls, True):
+        if isinstance(v, cls):
+            v.__pydantic_validate_values__()
+            return v
+        elif isinstance(v, (list, tuple)):
+            return cls(*v)
+        elif isinstance(v, dict):
+            return cls(**v)
+        else:
+            raise DataclassTypeError(class_name=cls.__name__)
+
+
+def create_pydantic_model_from_dataclass(
+    dc_cls: Type['Dataclass'],
+    config: Type[Any] = BaseConfig,
+    dc_cls_doc: Optional[str] = None,
+) -> Type['BaseModel']:
+    field_definitions: Dict[str, Any] = {}
+    for field in dataclasses.fields(dc_cls):
+        default: Any = Undefined
+        default_factory: Optional['NoArgAnyCallable'] = None
+        field_info: FieldInfo
+
+        if field.default is not dataclasses.MISSING:
+            default = field.default
+        elif field.default_factory is not dataclasses.MISSING:
+            default_factory = field.default_factory
+        else:
+            default = Required
+
+        if isinstance(default, FieldInfo):
+            field_info = default
+            dc_cls.__pydantic_has_field_info_default__ = True
+        else:
+            field_info = Field(default=default, default_factory=default_factory, **field.metadata)
+
+        field_definitions[field.name] = (field.type, field_info)
+
+    validators = gather_all_validators(dc_cls)
+    model: Type['BaseModel'] = create_model(
+        dc_cls.__name__,
+        __config__=config,
+        __module__=dc_cls.__module__,
+        __validators__=validators,
+        __cls_kwargs__={'__resolve_forward_refs__': False},
+        **field_definitions,
+    )
+    model.__doc__ = dc_cls_doc if dc_cls_doc is not None else dc_cls.__doc__ or ''
+    return model
+
+
+if sys.version_info >= (3, 8):
+
+    def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool:
+        return isinstance(getattr(type(obj), k, None), cached_property)
+
+else:
+
+    def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool:
+        return False
+
+
+def _dataclass_validate_values(self: 'Dataclass') -> None:
+    # Validation errors can occur if this function is called twice on an already initialised dataclass.
+    # For example, if Extra.forbid is enabled, it would consider __pydantic_initialised__ an invalid extra property.
+    if getattr(self, '__pydantic_initialised__'):
+        return
+    if getattr(self, '__pydantic_has_field_info_default__', False):
+        # We need to remove `FieldInfo` values since they are not valid as input
+        # It's ok to do that because they are obviously the default values!
+        input_data = {
+            k: v
+            for k, v in self.__dict__.items()
+            if not (isinstance(v, FieldInfo) or _is_field_cached_property(self, k))
+        }
+    else:
+        input_data = {k: v for k, v in self.__dict__.items() if not _is_field_cached_property(self, k)}
+    d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__)
+    if validation_error:
+        raise validation_error
+    self.__dict__.update(d)
+    object.__setattr__(self, '__pydantic_initialised__', True)
+
+
+def _dataclass_validate_assignment_setattr(self: 'Dataclass', name: str, value: Any) -> None:
+    if self.__pydantic_initialised__:
+        d = dict(self.__dict__)
+        d.pop(name, None)
+        known_field = self.__pydantic_model__.__fields__.get(name, None)
+        if known_field:
+            value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__)
+            if error_:
+                raise ValidationError([error_], self.__class__)
+
+    object.__setattr__(self, name, value)
+
+
+def is_builtin_dataclass(_cls: Type[Any]) -> bool:
+    """
+    Whether a class is a stdlib dataclass
+    (useful to discriminate a pydantic dataclass that is actually a wrapper around a stdlib dataclass)
+
+    We check that:
+    - `_cls` is a dataclass
+    - `_cls` is not a processed pydantic dataclass (with a basemodel attached)
+    - `_cls` is not a pydantic dataclass inheriting directly from a stdlib dataclass
+    e.g.
+    ```
+    @dataclasses.dataclass
+    class A:
+        x: int
+
+    @pydantic.dataclasses.dataclass
+    class B(A):
+        y: int
+    ```
+    In this case, when we first check `B`, we make an extra check and look at its annotations ('y'):
+    its dataclass fields at that point are only the stdlib ones (i.e. 'x'), so they are not a superset of those annotations
+    """
+    return (
+        dataclasses.is_dataclass(_cls)
+        and not hasattr(_cls, '__pydantic_model__')
+        and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {})))
+    )
+
+
+def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[BaseConfig]) -> 'CallableGenerator':
+    """
+    Create a pydantic.dataclass from a builtin dataclass to add type validation
+    and yield the validators.
+    It retrieves the parameters of the dataclass and forwards them to the newly created dataclass.
+    """
+    yield from _get_validators(dataclass(dc_cls, config=config, use_proxy=True))
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/datetime_parse.py b/.venv/lib/python3.12/site-packages/pydantic/v1/datetime_parse.py
new file mode 100644
index 00000000..a7598fc6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/datetime_parse.py
@@ -0,0 +1,248 @@
+"""
+Functions to parse datetime objects.
+
+We're using regular expressions rather than time.strptime because:
+- They provide both validation and parsing.
+- They're more flexible for datetimes.
+- The date/datetime/time constructors produce friendlier error messages.
+
+Stolen from https://raw.githubusercontent.com/django/django/main/django/utils/dateparse.py at
+9718fa2e8abe430c3526a9278dd976443d4ae3c6
+
+Changed to:
+* use standard python datetime types not django.utils.timezone
+* raise ValueError when regex doesn't match rather than returning None
+* support parsing unix timestamps for dates and datetimes
+"""
+import re
+from datetime import date, datetime, time, timedelta, timezone
+from typing import Dict, Optional, Type, Union
+
+from pydantic.v1 import errors
+
+date_expr = r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
+time_expr = (
+    r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
+    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
+    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
+)
+
+date_re = re.compile(f'{date_expr}$')
+time_re = re.compile(time_expr)
+datetime_re = re.compile(f'{date_expr}[T ]{time_expr}')
+
+standard_duration_re = re.compile(
+    r'^'
+    r'(?:(?P<days>-?\d+) (days?, )?)?'
+    r'((?:(?P<hours>-?\d+):)(?=\d+:\d+))?'
+    r'(?:(?P<minutes>-?\d+):)?'
+    r'(?P<seconds>-?\d+)'
+    r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?'
+    r'$'
+)
+
+# Support the sections of ISO 8601 date representation that are accepted by timedelta
+iso8601_duration_re = re.compile(
+    r'^(?P<sign>[-+]?)'
+    r'P'
+    r'(?:(?P<days>\d+(.\d+)?)D)?'
+    r'(?:T'
+    r'(?:(?P<hours>\d+(.\d+)?)H)?'
+    r'(?:(?P<minutes>\d+(.\d+)?)M)?'
+    r'(?:(?P<seconds>\d+(.\d+)?)S)?'
+    r')?'
+    r'$'
+)
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms; if less than or equal, it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+StrBytesIntFloat = Union[str, bytes, int, float]
+
+
+def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float')
+
+
+def from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
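+
+# Hedged example of the MS_WATERSHED heuristic above: both calls refer to the same
+# instant (2021-01-01T00:00:00+00:00), the second value being in milliseconds.
+#
+#     from_unix_seconds(1_609_459_200)
+#     from_unix_seconds(1_609_459_200_000)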
+
+
+def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]:
+    if value == 'Z':
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == '-':
+            offset = -offset
+        try:
+            return timezone(timedelta(minutes=offset))
+        except ValueError:
+            raise error()
+    else:
+        return None
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    """
+    Parse a date/int/float/string and return a datetime.date.
+
+    Raise ValueError if the input is well formatted but not a valid date.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = get_numeric(value, 'date')
+    if number is not None:
+        return from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = date_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.DateError()
+
+    kw = {k: int(v) for k, v in match.groupdict().items()}
+
+    try:
+        return date(**kw)
+    except ValueError:
+        raise errors.DateError()
+
+
+def parse_time(value: Union[time, StrBytesIntFloat]) -> time:
+    """
+    Parse a time/string and return a datetime.time.
+
+    Raise ValueError if the input is well formatted but not a valid time.
+    Raise ValueError if the input isn't well formatted, in particular if it contains an offset.
+    """
+    if isinstance(value, time):
+        return value
+
+    number = get_numeric(value, 'time')
+    if number is not None:
+        if number >= 86400:
+            # doesn't make sense since the time would loop back around to 0
+            raise errors.TimeError()
+        return (datetime.min + timedelta(seconds=number)).time()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = time_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.TimeError()
+
+    kw = match.groupdict()
+    if kw['microsecond']:
+        kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+
+    tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.TimeError)
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_['tzinfo'] = tzinfo
+
+    try:
+        return time(**kw_)  # type: ignore
+    except ValueError:
+        raise errors.TimeError()
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, datetime):
+        return value
+
+    number = get_numeric(value, 'datetime')
+    if number is not None:
+        return from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = datetime_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.DateTimeError()
+
+    kw = match.groupdict()
+    if kw['microsecond']:
+        kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+
+    tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.DateTimeError)
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_['tzinfo'] = tzinfo
+
+    try:
+        return datetime(**kw_)  # type: ignore
+    except ValueError:
+        raise errors.DateTimeError()
+
+
+def parse_duration(value: StrBytesIntFloat) -> timedelta:
+    """
+    Parse a duration int/float/string and return a datetime.timedelta.
+
+    The preferred format for durations in Django is '%d %H:%M:%S.%f'.
+
+    Also supports ISO 8601 representation.
+    """
+    if isinstance(value, timedelta):
+        return value
+
+    if isinstance(value, (int, float)):
+        # below code requires a string
+        value = f'{value:f}'
+    elif isinstance(value, bytes):
+        value = value.decode()
+
+    try:
+        match = standard_duration_re.match(value) or iso8601_duration_re.match(value)
+    except TypeError:
+        raise TypeError('invalid type; expected timedelta, string, bytes, int or float')
+
+    if not match:
+        raise errors.DurationError()
+
+    kw = match.groupdict()
+    sign = -1 if kw.pop('sign', '+') == '-' else 1
+    if kw.get('microseconds'):
+        kw['microseconds'] = kw['microseconds'].ljust(6, '0')
+
+    if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'):
+        kw['microseconds'] = '-' + kw['microseconds']
+
+    kw_ = {k: float(v) for k, v in kw.items() if v is not None}
+
+    return sign * timedelta(**kw_)
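+
+
+# Usage sketch (illustrative values only):
+#
+#     parse_date('2012-04-23')                -> date(2012, 4, 23)
+#     parse_datetime('2012-04-23T09:15:00Z')  -> datetime(2012, 4, 23, 9, 15, tzinfo=timezone.utc)
+#     parse_duration('P3DT12H30M5S')          -> timedelta(days=3, hours=12, minutes=30, seconds=5)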
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/decorator.py b/.venv/lib/python3.12/site-packages/pydantic/v1/decorator.py
new file mode 100644
index 00000000..2c7c2c2f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/decorator.py
@@ -0,0 +1,264 @@
+from functools import wraps
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload
+
+from pydantic.v1 import validator
+from pydantic.v1.config import Extra
+from pydantic.v1.errors import ConfigError
+from pydantic.v1.main import BaseModel, create_model
+from pydantic.v1.typing import get_all_type_hints
+from pydantic.v1.utils import to_camel
+
+__all__ = ('validate_arguments',)
+
+if TYPE_CHECKING:
+    from pydantic.v1.typing import AnyCallable
+
+    AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable)
+    ConfigType = Union[None, Type[Any], Dict[str, Any]]
+
+
+@overload
+def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']:
+    ...
+
+
+@overload
+def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT':
+    ...
+
+
+def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any:
+    """
+    Decorator to validate the arguments passed to a function.
+    """
+
+    def validate(_func: 'AnyCallable') -> 'AnyCallable':
+        vd = ValidatedFunction(_func, config)
+
+        @wraps(_func)
+        def wrapper_function(*args: Any, **kwargs: Any) -> Any:
+            return vd.call(*args, **kwargs)
+
+        wrapper_function.vd = vd  # type: ignore
+        wrapper_function.validate = vd.init_model_instance  # type: ignore
+        wrapper_function.raw_function = vd.raw_function  # type: ignore
+        wrapper_function.model = vd.model  # type: ignore
+        return wrapper_function
+
+    if func:
+        return validate(func)
+    else:
+        return validate
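+
+# Minimal sketch (hypothetical function, not from the library):
+#
+#     @validate_arguments
+#     def repeat(word: str, times: int) -> str:
+#         return word * times
+#
+#     repeat('x', '3')      # returns 'xxx', '3' is coerced to int
+#     repeat('x', 'oops')   # raises pydantic.ValidationError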
+
+
+ALT_V_ARGS = 'v__args'
+ALT_V_KWARGS = 'v__kwargs'
+V_POSITIONAL_ONLY_NAME = 'v__positional_only'
+V_DUPLICATE_KWARGS = 'v__duplicate_kwargs'
+
+
+class ValidatedFunction:
+    def __init__(self, function: 'AnyCallableT', config: 'ConfigType'):  # noqa C901
+        from inspect import Parameter, signature
+
+        parameters: Mapping[str, Parameter] = signature(function).parameters
+
+        if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}:
+            raise ConfigError(
+                f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" '
+                f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator'
+            )
+
+        self.raw_function = function
+        self.arg_mapping: Dict[int, str] = {}
+        self.positional_only_args = set()
+        self.v_args_name = 'args'
+        self.v_kwargs_name = 'kwargs'
+
+        type_hints = get_all_type_hints(function)
+        takes_args = False
+        takes_kwargs = False
+        fields: Dict[str, Tuple[Any, Any]] = {}
+        for i, (name, p) in enumerate(parameters.items()):
+            if p.annotation is p.empty:
+                annotation = Any
+            else:
+                annotation = type_hints[name]
+
+            default = ... if p.default is p.empty else p.default
+            if p.kind == Parameter.POSITIONAL_ONLY:
+                self.arg_mapping[i] = name
+                fields[name] = annotation, default
+                fields[V_POSITIONAL_ONLY_NAME] = List[str], None
+                self.positional_only_args.add(name)
+            elif p.kind == Parameter.POSITIONAL_OR_KEYWORD:
+                self.arg_mapping[i] = name
+                fields[name] = annotation, default
+                fields[V_DUPLICATE_KWARGS] = List[str], None
+            elif p.kind == Parameter.KEYWORD_ONLY:
+                fields[name] = annotation, default
+            elif p.kind == Parameter.VAR_POSITIONAL:
+                self.v_args_name = name
+                fields[name] = Tuple[annotation, ...], None
+                takes_args = True
+            else:
+                assert p.kind == Parameter.VAR_KEYWORD, p.kind
+                self.v_kwargs_name = name
+                fields[name] = Dict[str, annotation], None  # type: ignore
+                takes_kwargs = True
+
+        # these checks avoid a clash between "args" and a field with that name
+        if not takes_args and self.v_args_name in fields:
+            self.v_args_name = ALT_V_ARGS
+
+        # same with "kwargs"
+        if not takes_kwargs and self.v_kwargs_name in fields:
+            self.v_kwargs_name = ALT_V_KWARGS
+
+        if not takes_args:
+            # we add the field so validation below can raise the correct exception
+            fields[self.v_args_name] = List[Any], None
+
+        if not takes_kwargs:
+            # same with kwargs
+            fields[self.v_kwargs_name] = Dict[Any, Any], None
+
+        self.create_model(fields, takes_args, takes_kwargs, config)
+
+    def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel:
+        values = self.build_values(args, kwargs)
+        return self.model(**values)
+
+    def call(self, *args: Any, **kwargs: Any) -> Any:
+        m = self.init_model_instance(*args, **kwargs)
+        return self.execute(m)
+
+    def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]:
+        values: Dict[str, Any] = {}
+        if args:
+            arg_iter = enumerate(args)
+            while True:
+                try:
+                    i, a = next(arg_iter)
+                except StopIteration:
+                    break
+                arg_name = self.arg_mapping.get(i)
+                if arg_name is not None:
+                    values[arg_name] = a
+                else:
+                    values[self.v_args_name] = [a] + [a for _, a in arg_iter]
+                    break
+
+        var_kwargs: Dict[str, Any] = {}
+        wrong_positional_args = []
+        duplicate_kwargs = []
+        fields_alias = [
+            field.alias
+            for name, field in self.model.__fields__.items()
+            if name not in (self.v_args_name, self.v_kwargs_name)
+        ]
+        non_var_fields = set(self.model.__fields__) - {self.v_args_name, self.v_kwargs_name}
+        for k, v in kwargs.items():
+            if k in non_var_fields or k in fields_alias:
+                if k in self.positional_only_args:
+                    wrong_positional_args.append(k)
+                if k in values:
+                    duplicate_kwargs.append(k)
+                values[k] = v
+            else:
+                var_kwargs[k] = v
+
+        if var_kwargs:
+            values[self.v_kwargs_name] = var_kwargs
+        if wrong_positional_args:
+            values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args
+        if duplicate_kwargs:
+            values[V_DUPLICATE_KWARGS] = duplicate_kwargs
+        return values
+
+    def execute(self, m: BaseModel) -> Any:
+        d = {k: v for k, v in m._iter() if k in m.__fields_set__ or m.__fields__[k].default_factory}
+        var_kwargs = d.pop(self.v_kwargs_name, {})
+
+        if self.v_args_name in d:
+            args_: List[Any] = []
+            in_kwargs = False
+            kwargs = {}
+            for name, value in d.items():
+                if in_kwargs:
+                    kwargs[name] = value
+                elif name == self.v_args_name:
+                    args_ += value
+                    in_kwargs = True
+                else:
+                    args_.append(value)
+            return self.raw_function(*args_, **kwargs, **var_kwargs)
+        elif self.positional_only_args:
+            args_ = []
+            kwargs = {}
+            for name, value in d.items():
+                if name in self.positional_only_args:
+                    args_.append(value)
+                else:
+                    kwargs[name] = value
+            return self.raw_function(*args_, **kwargs, **var_kwargs)
+        else:
+            return self.raw_function(**d, **var_kwargs)
+
+    def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None:
+        pos_args = len(self.arg_mapping)
+
+        class CustomConfig:
+            pass
+
+        if not TYPE_CHECKING:  # pragma: no branch
+            if isinstance(config, dict):
+                CustomConfig = type('Config', (), config)  # noqa: F811
+            elif config is not None:
+                CustomConfig = config  # noqa: F811
+
+        if hasattr(CustomConfig, 'fields') or hasattr(CustomConfig, 'alias_generator'):
+            raise ConfigError(
+                'Setting the "fields" and "alias_generator" property on custom Config for '
+                '@validate_arguments is not yet supported, please remove.'
+            )
+
+        class DecoratorBaseModel(BaseModel):
+            @validator(self.v_args_name, check_fields=False, allow_reuse=True)
+            def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]:
+                if takes_args or v is None:
+                    return v
+
+                raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given')
+
+            @validator(self.v_kwargs_name, check_fields=False, allow_reuse=True)
+            def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
+                if takes_kwargs or v is None:
+                    return v
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v.keys()))
+                raise TypeError(f'unexpected keyword argument{plural}: {keys}')
+
+            @validator(V_POSITIONAL_ONLY_NAME, check_fields=False, allow_reuse=True)
+            def check_positional_only(cls, v: Optional[List[str]]) -> None:
+                if v is None:
+                    return
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v))
+                raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}')
+
+            @validator(V_DUPLICATE_KWARGS, check_fields=False, allow_reuse=True)
+            def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None:
+                if v is None:
+                    return
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v))
+                raise TypeError(f'multiple values for argument{plural}: {keys}')
+
+            class Config(CustomConfig):
+                extra = getattr(CustomConfig, 'extra', Extra.forbid)
+
+        self.model = create_model(to_camel(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/env_settings.py b/.venv/lib/python3.12/site-packages/pydantic/v1/env_settings.py
new file mode 100644
index 00000000..5f6f2175
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/env_settings.py
@@ -0,0 +1,350 @@
+import os
+import warnings
+from pathlib import Path
+from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union
+
+from pydantic.v1.config import BaseConfig, Extra
+from pydantic.v1.fields import ModelField
+from pydantic.v1.main import BaseModel
+from pydantic.v1.types import JsonWrapper
+from pydantic.v1.typing import StrPath, display_as_type, get_origin, is_union
+from pydantic.v1.utils import deep_update, lenient_issubclass, path_type, sequence_like
+
+env_file_sentinel = str(object())
+
+SettingsSourceCallable = Callable[['BaseSettings'], Dict[str, Any]]
+DotenvType = Union[StrPath, List[StrPath], Tuple[StrPath, ...]]
+
+
+class SettingsError(ValueError):
+    pass
+
+
+class BaseSettings(BaseModel):
+    """
+    Base class for settings, allowing values to be overridden by environment variables.
+
+    This is useful in production for secrets you do not wish to save in code; it plays nicely with docker(-compose),
+    Heroku and any 12-factor app design.
+    """
+
+    def __init__(
+        __pydantic_self__,
+        _env_file: Optional[DotenvType] = env_file_sentinel,
+        _env_file_encoding: Optional[str] = None,
+        _env_nested_delimiter: Optional[str] = None,
+        _secrets_dir: Optional[StrPath] = None,
+        **values: Any,
+    ) -> None:
+        # Uses something other than `self` for the first arg to allow "self" as a settable attribute
+        super().__init__(
+            **__pydantic_self__._build_values(
+                values,
+                _env_file=_env_file,
+                _env_file_encoding=_env_file_encoding,
+                _env_nested_delimiter=_env_nested_delimiter,
+                _secrets_dir=_secrets_dir,
+            )
+        )
+
+    def _build_values(
+        self,
+        init_kwargs: Dict[str, Any],
+        _env_file: Optional[DotenvType] = None,
+        _env_file_encoding: Optional[str] = None,
+        _env_nested_delimiter: Optional[str] = None,
+        _secrets_dir: Optional[StrPath] = None,
+    ) -> Dict[str, Any]:
+        # Configure built-in sources
+        init_settings = InitSettingsSource(init_kwargs=init_kwargs)
+        env_settings = EnvSettingsSource(
+            env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file),
+            env_file_encoding=(
+                _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding
+            ),
+            env_nested_delimiter=(
+                _env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter
+            ),
+            env_prefix_len=len(self.__config__.env_prefix),
+        )
+        file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir)
+        # Provide a hook to set built-in sources priority and add / remove sources
+        sources = self.__config__.customise_sources(
+            init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings
+        )
+        if sources:
+            return deep_update(*reversed([source(self) for source in sources]))
+        else:
+            # no one should mean to do this, but I think returning an empty dict is marginally preferable
+            # to an informative error and much better than a confusing error
+            return {}
+
+    class Config(BaseConfig):
+        env_prefix: str = ''
+        env_file: Optional[DotenvType] = None
+        env_file_encoding: Optional[str] = None
+        env_nested_delimiter: Optional[str] = None
+        secrets_dir: Optional[StrPath] = None
+        validate_all: bool = True
+        extra: Extra = Extra.forbid
+        arbitrary_types_allowed: bool = True
+        case_sensitive: bool = False
+
+        @classmethod
+        def prepare_field(cls, field: ModelField) -> None:
+            env_names: Union[List[str], AbstractSet[str]]
+            field_info_from_config = cls.get_field_info(field.name)
+
+            env = field_info_from_config.get('env') or field.field_info.extra.get('env')
+            if env is None:
+                if field.has_alias:
+                    warnings.warn(
+                        'aliases are no longer used by BaseSettings to define which environment variables to read. '
+                        'Instead use the "env" field setting. '
+                        'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names',
+                        FutureWarning,
+                    )
+                env_names = {cls.env_prefix + field.name}
+            elif isinstance(env, str):
+                env_names = {env}
+            elif isinstance(env, (set, frozenset)):
+                env_names = env
+            elif sequence_like(env):
+                env_names = list(env)
+            else:
+                raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set')
+
+            if not cls.case_sensitive:
+                env_names = env_names.__class__(n.lower() for n in env_names)
+            field.field_info.extra['env_names'] = env_names
+
+        @classmethod
+        def customise_sources(
+            cls,
+            init_settings: SettingsSourceCallable,
+            env_settings: SettingsSourceCallable,
+            file_secret_settings: SettingsSourceCallable,
+        ) -> Tuple[SettingsSourceCallable, ...]:
+            return init_settings, env_settings, file_secret_settings
+
+        @classmethod
+        def parse_env_var(cls, field_name: str, raw_val: str) -> Any:
+            return cls.json_loads(raw_val)
+
+    # populated by the metaclass using the Config class defined above, annotated here to help IDEs only
+    __config__: ClassVar[Type[Config]]
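+
+# Illustrative sketch (the settings class and env var below are hypothetical):
+# field values fall back to environment variables, here matched via `env_prefix`.
+#
+#     class AppSettings(BaseSettings):
+#         debug: bool = False
+#
+#         class Config:
+#             env_prefix = 'APP_'
+#
+#     # with APP_DEBUG=1 in the environment, AppSettings().debug is True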
+
+
+class InitSettingsSource:
+    __slots__ = ('init_kwargs',)
+
+    def __init__(self, init_kwargs: Dict[str, Any]):
+        self.init_kwargs = init_kwargs
+
+    def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
+        return self.init_kwargs
+
+    def __repr__(self) -> str:
+        return f'InitSettingsSource(init_kwargs={self.init_kwargs!r})'
+
+
+class EnvSettingsSource:
+    __slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter', 'env_prefix_len')
+
+    def __init__(
+        self,
+        env_file: Optional[DotenvType],
+        env_file_encoding: Optional[str],
+        env_nested_delimiter: Optional[str] = None,
+        env_prefix_len: int = 0,
+    ):
+        self.env_file: Optional[DotenvType] = env_file
+        self.env_file_encoding: Optional[str] = env_file_encoding
+        self.env_nested_delimiter: Optional[str] = env_nested_delimiter
+        self.env_prefix_len: int = env_prefix_len
+
+    def __call__(self, settings: BaseSettings) -> Dict[str, Any]:  # noqa C901
+        """
+        Build environment variables suitable for passing to the Model.
+        """
+        d: Dict[str, Any] = {}
+
+        if settings.__config__.case_sensitive:
+            env_vars: Mapping[str, Optional[str]] = os.environ
+        else:
+            env_vars = {k.lower(): v for k, v in os.environ.items()}
+
+        dotenv_vars = self._read_env_files(settings.__config__.case_sensitive)
+        if dotenv_vars:
+            env_vars = {**dotenv_vars, **env_vars}
+
+        for field in settings.__fields__.values():
+            env_val: Optional[str] = None
+            for env_name in field.field_info.extra['env_names']:
+                env_val = env_vars.get(env_name)
+                if env_val is not None:
+                    break
+
+            is_complex, allow_parse_failure = self.field_is_complex(field)
+            if is_complex:
+                if env_val is None:
+                    # field is complex but no value found so far, try explode_env_vars
+                    env_val_built = self.explode_env_vars(field, env_vars)
+                    if env_val_built:
+                        d[field.alias] = env_val_built
+                else:
+                    # field is complex and there's a value, decode that as JSON, then add explode_env_vars
+                    try:
+                        env_val = settings.__config__.parse_env_var(field.name, env_val)
+                    except ValueError as e:
+                        if not allow_parse_failure:
+                            raise SettingsError(f'error parsing env var "{env_name}"') from e
+
+                    if isinstance(env_val, dict):
+                        d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars))
+                    else:
+                        d[field.alias] = env_val
+            elif env_val is not None:
+                # simplest case, field is not complex, we only need to add the value if it was found
+                d[field.alias] = env_val
+
+        return d
+
+    def _read_env_files(self, case_sensitive: bool) -> Dict[str, Optional[str]]:
+        env_files = self.env_file
+        if env_files is None:
+            return {}
+
+        if isinstance(env_files, (str, os.PathLike)):
+            env_files = [env_files]
+
+        dotenv_vars = {}
+        for env_file in env_files:
+            env_path = Path(env_file).expanduser()
+            if env_path.is_file():
+                dotenv_vars.update(
+                    read_env_file(env_path, encoding=self.env_file_encoding, case_sensitive=case_sensitive)
+                )
+
+        return dotenv_vars
+
+    def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]:
+        """
+        Find out if a field is complex, and if so whether JSON errors should be ignored
+        """
+        if lenient_issubclass(field.annotation, JsonWrapper):
+            return False, False
+
+        if field.is_complex():
+            allow_parse_failure = False
+        elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields):
+            allow_parse_failure = True
+        else:
+            return False, False
+
+        return True, allow_parse_failure
+
+    def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]:
+        """
+        Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries.
+
+        This is applied to a single field, hence filtering by env_var prefix.
+        """
+        prefixes = [f'{env_name}{self.env_nested_delimiter}' for env_name in field.field_info.extra['env_names']]
+        result: Dict[str, Any] = {}
+        for env_name, env_val in env_vars.items():
+            if not any(env_name.startswith(prefix) for prefix in prefixes):
+                continue
+            # we remove the prefix before splitting in case the prefix has characters in common with the delimiter
+            env_name_without_prefix = env_name[self.env_prefix_len :]
+            _, *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter)
+            env_var = result
+            for key in keys:
+                env_var = env_var.setdefault(key, {})
+            env_var[last_key] = env_val
+
+        return result
+
+    def __repr__(self) -> str:
+        return (
+            f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, '
+            f'env_nested_delimiter={self.env_nested_delimiter!r})'
+        )
+
+
+class SecretsSettingsSource:
+    __slots__ = ('secrets_dir',)
+
+    def __init__(self, secrets_dir: Optional[StrPath]):
+        self.secrets_dir: Optional[StrPath] = secrets_dir
+
+    def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
+        """
+        Build fields from "secrets" files.
+        """
+        secrets: Dict[str, Optional[str]] = {}
+
+        if self.secrets_dir is None:
+            return secrets
+
+        secrets_path = Path(self.secrets_dir).expanduser()
+
+        if not secrets_path.exists():
+            warnings.warn(f'directory "{secrets_path}" does not exist')
+            return secrets
+
+        if not secrets_path.is_dir():
+            raise SettingsError(f'secrets_dir must reference a directory, not a {path_type(secrets_path)}')
+
+        for field in settings.__fields__.values():
+            for env_name in field.field_info.extra['env_names']:
+                path = find_case_path(secrets_path, env_name, settings.__config__.case_sensitive)
+                if not path:
+                    # path does not exist, we currently don't return a warning for this
+                    continue
+
+                if path.is_file():
+                    secret_value = path.read_text().strip()
+                    if field.is_complex():
+                        try:
+                            secret_value = settings.__config__.parse_env_var(field.name, secret_value)
+                        except ValueError as e:
+                            raise SettingsError(f'error parsing env var "{env_name}"') from e
+
+                    secrets[field.alias] = secret_value
+                else:
+                    warnings.warn(
+                        f'attempted to load secret file "{path}" but found a {path_type(path)} instead.',
+                        stacklevel=4,
+                    )
+        return secrets
+
+    def __repr__(self) -> str:
+        return f'SecretsSettingsSource(secrets_dir={self.secrets_dir!r})'
+
+
+def read_env_file(
+    file_path: StrPath, *, encoding: Optional[str] = None, case_sensitive: bool = False
+) -> Dict[str, Optional[str]]:
+    try:
+        from dotenv import dotenv_values
+    except ImportError as e:
+        raise ImportError('python-dotenv is not installed, run `pip install pydantic[dotenv]`') from e
+
+    file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or 'utf8')
+    if not case_sensitive:
+        return {k.lower(): v for k, v in file_vars.items()}
+    else:
+        return file_vars
+
+
+def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool) -> Optional[Path]:
+    """
+    Find a file within path's directory matching filename, optionally ignoring case.
+    """
+    for f in dir_path.iterdir():
+        if f.name == file_name:
+            return f
+        elif not case_sensitive and f.name.lower() == file_name.lower():
+            return f
+    return None
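A minimal sketch of what the nested-delimiter handling in explode_env_vars above produces, assuming env_nested_delimiter='__', an empty env_prefix, and a field whose env name is 'app' (the names and values here are illustrative only, not part of the file):

    env_vars = {'app__database__host': 'localhost', 'app__database__port': '5432'}
    result = {}
    for env_name, env_val in env_vars.items():
        # same shape as the loop above: discard the field's own env name, then nest by delimiter
        _, *keys, last_key = env_name.split('__')
        target = result
        for key in keys:
            target = target.setdefault(key, {})
        target[last_key] = env_val
    assert result == {'database': {'host': 'localhost', 'port': '5432'}}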
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/error_wrappers.py b/.venv/lib/python3.12/site-packages/pydantic/v1/error_wrappers.py
new file mode 100644
index 00000000..bc7f2631
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/error_wrappers.py
@@ -0,0 +1,161 @@
+import json
+from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union
+
+from pydantic.v1.json import pydantic_encoder
+from pydantic.v1.utils import Representation
+
+if TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from pydantic.v1.config import BaseConfig
+    from pydantic.v1.types import ModelOrDc
+    from pydantic.v1.typing import ReprArgs
+
+    Loc = Tuple[Union[int, str], ...]
+
+    class _ErrorDictRequired(TypedDict):
+        loc: Loc
+        msg: str
+        type: str
+
+    class ErrorDict(_ErrorDictRequired, total=False):
+        ctx: Dict[str, Any]
+
+
+__all__ = 'ErrorWrapper', 'ValidationError'
+
+
+class ErrorWrapper(Representation):
+    __slots__ = 'exc', '_loc'
+
+    def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None:
+        self.exc = exc
+        self._loc = loc
+
+    def loc_tuple(self) -> 'Loc':
+        if isinstance(self._loc, tuple):
+            return self._loc
+        else:
+            return (self._loc,)
+
+    def __repr_args__(self) -> 'ReprArgs':
+        return [('exc', self.exc), ('loc', self.loc_tuple())]
+
+
+# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper]
+# but recursive, therefore just use:
+ErrorList = Union[Sequence[Any], ErrorWrapper]
+
+
+class ValidationError(Representation, ValueError):
+    __slots__ = 'raw_errors', 'model', '_error_cache'
+
+    def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None:
+        self.raw_errors = errors
+        self.model = model
+        self._error_cache: Optional[List['ErrorDict']] = None
+
+    def errors(self) -> List['ErrorDict']:
+        if self._error_cache is None:
+            try:
+                config = self.model.__config__  # type: ignore
+            except AttributeError:
+                config = self.model.__pydantic_model__.__config__  # type: ignore
+            self._error_cache = list(flatten_errors(self.raw_errors, config))
+        return self._error_cache
+
+    def json(self, *, indent: Union[None, int, str] = 2) -> str:
+        return json.dumps(self.errors(), indent=indent, default=pydantic_encoder)
+
+    def __str__(self) -> str:
+        errors = self.errors()
+        no_errors = len(errors)
+        return (
+            f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n'
+            f'{display_errors(errors)}'
+        )
+
+    def __repr_args__(self) -> 'ReprArgs':
+        return [('model', self.model.__name__), ('errors', self.errors())]
+
+
+def display_errors(errors: List['ErrorDict']) -> str:
+    return '\n'.join(f'{_display_error_loc(e)}\n  {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors)
+
+
+def _display_error_loc(error: 'ErrorDict') -> str:
+    return ' -> '.join(str(e) for e in error['loc'])
+
+
+def _display_error_type_and_ctx(error: 'ErrorDict') -> str:
+    t = 'type=' + error['type']
+    ctx = error.get('ctx')
+    if ctx:
+        return t + ''.join(f'; {k}={v}' for k, v in ctx.items())
+    else:
+        return t
+
+
+def flatten_errors(
+    errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None
+) -> Generator['ErrorDict', None, None]:
+    for error in errors:
+        if isinstance(error, ErrorWrapper):
+            if loc:
+                error_loc = loc + error.loc_tuple()
+            else:
+                error_loc = error.loc_tuple()
+
+            if isinstance(error.exc, ValidationError):
+                yield from flatten_errors(error.exc.raw_errors, config, error_loc)
+            else:
+                yield error_dict(error.exc, config, error_loc)
+        elif isinstance(error, list):
+            yield from flatten_errors(error, config, loc=loc)
+        else:
+            raise RuntimeError(f'Unknown error object: {error}')
+
+
+def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> 'ErrorDict':
+    type_ = get_exc_type(exc.__class__)
+    msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None)
+    ctx = exc.__dict__
+    if msg_template:
+        msg = msg_template.format(**ctx)
+    else:
+        msg = str(exc)
+
+    d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_}
+
+    if ctx:
+        d['ctx'] = ctx
+
+    return d
+
+
+_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {}
+
+
+def get_exc_type(cls: Type[Exception]) -> str:
+    # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up
+    try:
+        return _EXC_TYPE_CACHE[cls]
+    except KeyError:
+        r = _get_exc_type(cls)
+        _EXC_TYPE_CACHE[cls] = r
+        return r
+
+
+def _get_exc_type(cls: Type[Exception]) -> str:
+    if issubclass(cls, AssertionError):
+        return 'assertion_error'
+
+    base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error'
+    if cls in (TypeError, ValueError):
+        # just TypeError or ValueError, no extra code
+        return base_name
+
+    # if it's not a TypeError or ValueError, we just take the lowercase of the exception name
+    # no chaining or snake case logic, use "code" for more complex error types.
+    code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower()
+    return base_name + '.' + code
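As a usage illustration of the machinery above (assuming the vendored import path used throughout this package), field errors surface with the loc/msg/type structure defined by ErrorDict and get_exc_type:

    from pydantic.v1 import BaseModel

    class User(BaseModel):
        id: int
        name: str

    try:
        User(id='not an int')
    except ValueError as exc:  # pydantic.v1.error_wrappers.ValidationError
        print(exc.errors())
        # [{'loc': ('id',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
        #  {'loc': ('name',), 'msg': 'field required', 'type': 'value_error.missing'}]
        print(exc.json(indent=None))  # same data, serialised via pydantic_encoder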
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/errors.py b/.venv/lib/python3.12/site-packages/pydantic/v1/errors.py
new file mode 100644
index 00000000..6e864425
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/errors.py
@@ -0,0 +1,646 @@
+from decimal import Decimal
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union
+
+from pydantic.v1.typing import display_as_type
+
+if TYPE_CHECKING:
+    from pydantic.v1.typing import DictStrAny
+
+# explicitly state exports to avoid "from pydantic.v1.errors import *" also importing Decimal, Path etc.
+__all__ = (
+    'PydanticTypeError',
+    'PydanticValueError',
+    'ConfigError',
+    'MissingError',
+    'ExtraError',
+    'NoneIsNotAllowedError',
+    'NoneIsAllowedError',
+    'WrongConstantError',
+    'NotNoneError',
+    'BoolError',
+    'BytesError',
+    'DictError',
+    'EmailError',
+    'UrlError',
+    'UrlSchemeError',
+    'UrlSchemePermittedError',
+    'UrlUserInfoError',
+    'UrlHostError',
+    'UrlHostTldError',
+    'UrlPortError',
+    'UrlExtraError',
+    'EnumError',
+    'IntEnumError',
+    'EnumMemberError',
+    'IntegerError',
+    'FloatError',
+    'PathError',
+    'PathNotExistsError',
+    'PathNotAFileError',
+    'PathNotADirectoryError',
+    'PyObjectError',
+    'SequenceError',
+    'ListError',
+    'SetError',
+    'FrozenSetError',
+    'TupleError',
+    'TupleLengthError',
+    'ListMinLengthError',
+    'ListMaxLengthError',
+    'ListUniqueItemsError',
+    'SetMinLengthError',
+    'SetMaxLengthError',
+    'FrozenSetMinLengthError',
+    'FrozenSetMaxLengthError',
+    'AnyStrMinLengthError',
+    'AnyStrMaxLengthError',
+    'StrError',
+    'StrRegexError',
+    'NumberNotGtError',
+    'NumberNotGeError',
+    'NumberNotLtError',
+    'NumberNotLeError',
+    'NumberNotMultipleError',
+    'DecimalError',
+    'DecimalIsNotFiniteError',
+    'DecimalMaxDigitsError',
+    'DecimalMaxPlacesError',
+    'DecimalWholeDigitsError',
+    'DateTimeError',
+    'DateError',
+    'DateNotInThePastError',
+    'DateNotInTheFutureError',
+    'TimeError',
+    'DurationError',
+    'HashableError',
+    'UUIDError',
+    'UUIDVersionError',
+    'ArbitraryTypeError',
+    'ClassError',
+    'SubclassError',
+    'JsonError',
+    'JsonTypeError',
+    'PatternError',
+    'DataclassTypeError',
+    'CallableError',
+    'IPvAnyAddressError',
+    'IPvAnyInterfaceError',
+    'IPvAnyNetworkError',
+    'IPv4AddressError',
+    'IPv6AddressError',
+    'IPv4NetworkError',
+    'IPv6NetworkError',
+    'IPv4InterfaceError',
+    'IPv6InterfaceError',
+    'ColorError',
+    'StrictBoolError',
+    'NotDigitError',
+    'LuhnValidationError',
+    'InvalidLengthForBrand',
+    'InvalidByteSize',
+    'InvalidByteSizeUnit',
+    'MissingDiscriminator',
+    'InvalidDiscriminator',
+)
+
+
+def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny') -> 'PydanticErrorMixin':
+    """
+    For built-in exceptions like ValueError or TypeError, we need to implement
+    __reduce__ to override the default behaviour (instead of __getstate__/__setstate__).
+    By default pickle protocol 2 calls `cls.__new__(cls, *args)`.
+    Since we only use kwargs, we need a little constructor to change that.
+    Note: the callable can't be a lambda as pickle looks in the namespace to find it
+    """
+    return cls(**ctx)
+
+
+class PydanticErrorMixin:
+    code: str
+    msg_template: str
+
+    def __init__(self, **ctx: Any) -> None:
+        self.__dict__ = ctx
+
+    def __str__(self) -> str:
+        return self.msg_template.format(**self.__dict__)
+
+    def __reduce__(self) -> Tuple[Callable[..., 'PydanticErrorMixin'], Tuple[Type['PydanticErrorMixin'], 'DictStrAny']]:
+        return cls_kwargs, (self.__class__, self.__dict__)
+
+
+class PydanticTypeError(PydanticErrorMixin, TypeError):
+    pass
+
+
+class PydanticValueError(PydanticErrorMixin, ValueError):
+    pass
+
+
+class ConfigError(RuntimeError):
+    pass
+
+
+class MissingError(PydanticValueError):
+    msg_template = 'field required'
+
+
+class ExtraError(PydanticValueError):
+    msg_template = 'extra fields not permitted'
+
+
+class NoneIsNotAllowedError(PydanticTypeError):
+    code = 'none.not_allowed'
+    msg_template = 'none is not an allowed value'
+
+
+class NoneIsAllowedError(PydanticTypeError):
+    code = 'none.allowed'
+    msg_template = 'value is not none'
+
+
+class WrongConstantError(PydanticValueError):
+    code = 'const'
+
+    def __str__(self) -> str:
+        permitted = ', '.join(repr(v) for v in self.permitted)  # type: ignore
+        return f'unexpected value; permitted: {permitted}'
+
+
+class NotNoneError(PydanticTypeError):
+    code = 'not_none'
+    msg_template = 'value is not None'
+
+
+class BoolError(PydanticTypeError):
+    msg_template = 'value could not be parsed to a boolean'
+
+
+class BytesError(PydanticTypeError):
+    msg_template = 'byte type expected'
+
+
+class DictError(PydanticTypeError):
+    msg_template = 'value is not a valid dict'
+
+
+class EmailError(PydanticValueError):
+    msg_template = 'value is not a valid email address'
+
+
+class UrlError(PydanticValueError):
+    code = 'url'
+
+
+class UrlSchemeError(UrlError):
+    code = 'url.scheme'
+    msg_template = 'invalid or missing URL scheme'
+
+
+class UrlSchemePermittedError(UrlError):
+    code = 'url.scheme'
+    msg_template = 'URL scheme not permitted'
+
+    def __init__(self, allowed_schemes: Set[str]):
+        super().__init__(allowed_schemes=allowed_schemes)
+
+
+class UrlUserInfoError(UrlError):
+    code = 'url.userinfo'
+    msg_template = 'userinfo required in URL but missing'
+
+
+class UrlHostError(UrlError):
+    code = 'url.host'
+    msg_template = 'URL host invalid'
+
+
+class UrlHostTldError(UrlError):
+    code = 'url.host'
+    msg_template = 'URL host invalid, top level domain required'
+
+
+class UrlPortError(UrlError):
+    code = 'url.port'
+    msg_template = 'URL port invalid, port cannot exceed 65535'
+
+
+class UrlExtraError(UrlError):
+    code = 'url.extra'
+    msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}'
+
+
+class EnumMemberError(PydanticTypeError):
+    code = 'enum'
+
+    def __str__(self) -> str:
+        permitted = ', '.join(repr(v.value) for v in self.enum_values)  # type: ignore
+        return f'value is not a valid enumeration member; permitted: {permitted}'
+
+
+class IntegerError(PydanticTypeError):
+    msg_template = 'value is not a valid integer'
+
+
+class FloatError(PydanticTypeError):
+    msg_template = 'value is not a valid float'
+
+
+class PathError(PydanticTypeError):
+    msg_template = 'value is not a valid path'
+
+
+class _PathValueError(PydanticValueError):
+    def __init__(self, *, path: Path) -> None:
+        super().__init__(path=str(path))
+
+
+class PathNotExistsError(_PathValueError):
+    code = 'path.not_exists'
+    msg_template = 'file or directory at path "{path}" does not exist'
+
+
+class PathNotAFileError(_PathValueError):
+    code = 'path.not_a_file'
+    msg_template = 'path "{path}" does not point to a file'
+
+
+class PathNotADirectoryError(_PathValueError):
+    code = 'path.not_a_directory'
+    msg_template = 'path "{path}" does not point to a directory'
+
+
+class PyObjectError(PydanticTypeError):
+    msg_template = 'ensure this value contains valid import path or valid callable: {error_message}'
+
+
+class SequenceError(PydanticTypeError):
+    msg_template = 'value is not a valid sequence'
+
+
+class IterableError(PydanticTypeError):
+    msg_template = 'value is not a valid iterable'
+
+
+class ListError(PydanticTypeError):
+    msg_template = 'value is not a valid list'
+
+
+class SetError(PydanticTypeError):
+    msg_template = 'value is not a valid set'
+
+
+class FrozenSetError(PydanticTypeError):
+    msg_template = 'value is not a valid frozenset'
+
+
+class DequeError(PydanticTypeError):
+    msg_template = 'value is not a valid deque'
+
+
+class TupleError(PydanticTypeError):
+    msg_template = 'value is not a valid tuple'
+
+
+class TupleLengthError(PydanticValueError):
+    code = 'tuple.length'
+    msg_template = 'wrong tuple length {actual_length}, expected {expected_length}'
+
+    def __init__(self, *, actual_length: int, expected_length: int) -> None:
+        super().__init__(actual_length=actual_length, expected_length=expected_length)
+
+
+class ListMinLengthError(PydanticValueError):
+    code = 'list.min_items'
+    msg_template = 'ensure this value has at least {limit_value} items'
+
+    def __init__(self, *, limit_value: int) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class ListMaxLengthError(PydanticValueError):
+    code = 'list.max_items'
+    msg_template = 'ensure this value has at most {limit_value} items'
+
+    def __init__(self, *, limit_value: int) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class ListUniqueItemsError(PydanticValueError):
+    code = 'list.unique_items'
+    msg_template = 'the list has duplicated items'
+
+
+class SetMinLengthError(PydanticValueError):
+    code = 'set.min_items'
+    msg_template = 'ensure this value has at least {limit_value} items'
+
+    def __init__(self, *, limit_value: int) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class SetMaxLengthError(PydanticValueError):
+    code = 'set.max_items'
+    msg_template = 'ensure this value has at most {limit_value} items'
+
+    def __init__(self, *, limit_value: int) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class FrozenSetMinLengthError(PydanticValueError):
+    code = 'frozenset.min_items'
+    msg_template = 'ensure this value has at least {limit_value} items'
+
+    def __init__(self, *, limit_value: int) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class FrozenSetMaxLengthError(PydanticValueError):
+    code = 'frozenset.max_items'
+    msg_template = 'ensure this value has at most {limit_value} items'
+
+    def __init__(self, *, limit_value: int) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class AnyStrMinLengthError(PydanticValueError):
+    code = 'any_str.min_length'
+    msg_template = 'ensure this value has at least {limit_value} characters'
+
+    def __init__(self, *, limit_value: int) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class AnyStrMaxLengthError(PydanticValueError):
+    code = 'any_str.max_length'
+    msg_template = 'ensure this value has at most {limit_value} characters'
+
+    def __init__(self, *, limit_value: int) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class StrError(PydanticTypeError):
+    msg_template = 'str type expected'
+
+
+class StrRegexError(PydanticValueError):
+    code = 'str.regex'
+    msg_template = 'string does not match regex "{pattern}"'
+
+    def __init__(self, *, pattern: str) -> None:
+        super().__init__(pattern=pattern)
+
+
+class _NumberBoundError(PydanticValueError):
+    def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None:
+        super().__init__(limit_value=limit_value)
+
+
+class NumberNotGtError(_NumberBoundError):
+    code = 'number.not_gt'
+    msg_template = 'ensure this value is greater than {limit_value}'
+
+
+class NumberNotGeError(_NumberBoundError):
+    code = 'number.not_ge'
+    msg_template = 'ensure this value is greater than or equal to {limit_value}'
+
+
+class NumberNotLtError(_NumberBoundError):
+    code = 'number.not_lt'
+    msg_template = 'ensure this value is less than {limit_value}'
+
+
+class NumberNotLeError(_NumberBoundError):
+    code = 'number.not_le'
+    msg_template = 'ensure this value is less than or equal to {limit_value}'
+
+
+class NumberNotFiniteError(PydanticValueError):
+    code = 'number.not_finite_number'
+    msg_template = 'ensure this value is a finite number'
+
+
+class NumberNotMultipleError(PydanticValueError):
+    code = 'number.not_multiple'
+    msg_template = 'ensure this value is a multiple of {multiple_of}'
+
+    def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None:
+        super().__init__(multiple_of=multiple_of)
+
+
+class DecimalError(PydanticTypeError):
+    msg_template = 'value is not a valid decimal'
+
+
+class DecimalIsNotFiniteError(PydanticValueError):
+    code = 'decimal.not_finite'
+    msg_template = 'value is not a valid decimal'
+
+
+class DecimalMaxDigitsError(PydanticValueError):
+    code = 'decimal.max_digits'
+    msg_template = 'ensure that there are no more than {max_digits} digits in total'
+
+    def __init__(self, *, max_digits: int) -> None:
+        super().__init__(max_digits=max_digits)
+
+
+class DecimalMaxPlacesError(PydanticValueError):
+    code = 'decimal.max_places'
+    msg_template = 'ensure that there are no more than {decimal_places} decimal places'
+
+    def __init__(self, *, decimal_places: int) -> None:
+        super().__init__(decimal_places=decimal_places)
+
+
+class DecimalWholeDigitsError(PydanticValueError):
+    code = 'decimal.whole_digits'
+    msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point'
+
+    def __init__(self, *, whole_digits: int) -> None:
+        super().__init__(whole_digits=whole_digits)
+
+
+class DateTimeError(PydanticValueError):
+    msg_template = 'invalid datetime format'
+
+
+class DateError(PydanticValueError):
+    msg_template = 'invalid date format'
+
+
+class DateNotInThePastError(PydanticValueError):
+    code = 'date.not_in_the_past'
+    msg_template = 'date is not in the past'
+
+
+class DateNotInTheFutureError(PydanticValueError):
+    code = 'date.not_in_the_future'
+    msg_template = 'date is not in the future'
+
+
+class TimeError(PydanticValueError):
+    msg_template = 'invalid time format'
+
+
+class DurationError(PydanticValueError):
+    msg_template = 'invalid duration format'
+
+
+class HashableError(PydanticTypeError):
+    msg_template = 'value is not a valid hashable'
+
+
+class UUIDError(PydanticTypeError):
+    msg_template = 'value is not a valid uuid'
+
+
+class UUIDVersionError(PydanticValueError):
+    code = 'uuid.version'
+    msg_template = 'uuid version {required_version} expected'
+
+    def __init__(self, *, required_version: int) -> None:
+        super().__init__(required_version=required_version)
+
+
+class ArbitraryTypeError(PydanticTypeError):
+    code = 'arbitrary_type'
+    msg_template = 'instance of {expected_arbitrary_type} expected'
+
+    def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None:
+        super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type))
+
+
+class ClassError(PydanticTypeError):
+    code = 'class'
+    msg_template = 'a class is expected'
+
+
+class SubclassError(PydanticTypeError):
+    code = 'subclass'
+    msg_template = 'subclass of {expected_class} expected'
+
+    def __init__(self, *, expected_class: Type[Any]) -> None:
+        super().__init__(expected_class=display_as_type(expected_class))
+
+
+class JsonError(PydanticValueError):
+    msg_template = 'Invalid JSON'
+
+
+class JsonTypeError(PydanticTypeError):
+    code = 'json'
+    msg_template = 'JSON object must be str, bytes or bytearray'
+
+
+class PatternError(PydanticValueError):
+    code = 'regex_pattern'
+    msg_template = 'Invalid regular expression'
+
+
+class DataclassTypeError(PydanticTypeError):
+    code = 'dataclass'
+    msg_template = 'instance of {class_name}, tuple or dict expected'
+
+
+class CallableError(PydanticTypeError):
+    msg_template = '{value} is not callable'
+
+
+class EnumError(PydanticTypeError):
+    code = 'enum_instance'
+    msg_template = '{value} is not a valid Enum instance'
+
+
+class IntEnumError(PydanticTypeError):
+    code = 'int_enum_instance'
+    msg_template = '{value} is not a valid IntEnum instance'
+
+
+class IPvAnyAddressError(PydanticValueError):
+    msg_template = 'value is not a valid IPv4 or IPv6 address'
+
+
+class IPvAnyInterfaceError(PydanticValueError):
+    msg_template = 'value is not a valid IPv4 or IPv6 interface'
+
+
+class IPvAnyNetworkError(PydanticValueError):
+    msg_template = 'value is not a valid IPv4 or IPv6 network'
+
+
+class IPv4AddressError(PydanticValueError):
+    msg_template = 'value is not a valid IPv4 address'
+
+
+class IPv6AddressError(PydanticValueError):
+    msg_template = 'value is not a valid IPv6 address'
+
+
+class IPv4NetworkError(PydanticValueError):
+    msg_template = 'value is not a valid IPv4 network'
+
+
+class IPv6NetworkError(PydanticValueError):
+    msg_template = 'value is not a valid IPv6 network'
+
+
+class IPv4InterfaceError(PydanticValueError):
+    msg_template = 'value is not a valid IPv4 interface'
+
+
+class IPv6InterfaceError(PydanticValueError):
+    msg_template = 'value is not a valid IPv6 interface'
+
+
+class ColorError(PydanticValueError):
+    msg_template = 'value is not a valid color: {reason}'
+
+
+class StrictBoolError(PydanticValueError):
+    msg_template = 'value is not a valid boolean'
+
+
+class NotDigitError(PydanticValueError):
+    code = 'payment_card_number.digits'
+    msg_template = 'card number is not all digits'
+
+
+class LuhnValidationError(PydanticValueError):
+    code = 'payment_card_number.luhn_check'
+    msg_template = 'card number is not luhn valid'
+
+
+class InvalidLengthForBrand(PydanticValueError):
+    code = 'payment_card_number.invalid_length_for_brand'
+    msg_template = 'Length for a {brand} card must be {required_length}'
+
+
+class InvalidByteSize(PydanticValueError):
+    msg_template = 'could not parse value and unit from byte string'
+
+
+class InvalidByteSizeUnit(PydanticValueError):
+    msg_template = 'could not interpret byte unit: {unit}'
+
+
+class MissingDiscriminator(PydanticValueError):
+    code = 'discriminated_union.missing_discriminator'
+    msg_template = 'Discriminator {discriminator_key!r} is missing in value'
+
+
+class InvalidDiscriminator(PydanticValueError):
+    code = 'discriminated_union.invalid_discriminator'
+    msg_template = (
+        'No match for discriminator {discriminator_key!r} and value {discriminator_value!r} '
+        '(allowed values: {allowed_values})'
+    )
+
+    def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None:
+        super().__init__(
+            discriminator_key=discriminator_key,
+            discriminator_value=discriminator_value,
+            allowed_values=', '.join(map(repr, allowed_values)),
+        )
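A short sketch of the subclassing pattern the errors above follow; the error class and message below are hypothetical, not part of pydantic:

    import pickle

    from pydantic.v1.errors import PydanticValueError

    class NotEvenError(PydanticValueError):  # hypothetical example error
        code = 'number.not_even'
        msg_template = 'ensure this value is even, got {value}'

    err = NotEvenError(value=3)
    assert str(err) == 'ensure this value is even, got 3'
    # __reduce__ (via cls_kwargs) keeps the keyword-only context picklable
    assert str(pickle.loads(pickle.dumps(err))) == str(err)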
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/fields.py b/.venv/lib/python3.12/site-packages/pydantic/v1/fields.py
new file mode 100644
index 00000000..002b60cd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/fields.py
@@ -0,0 +1,1253 @@
+import copy
+import re
+from collections import Counter as CollectionCounter, defaultdict, deque
+from collections.abc import Callable, Hashable as CollectionsHashable, Iterable as CollectionsIterable
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Counter,
+    DefaultDict,
+    Deque,
+    Dict,
+    ForwardRef,
+    FrozenSet,
+    Generator,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Pattern,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+
+from typing_extensions import Annotated, Final
+
+from pydantic.v1 import errors as errors_
+from pydantic.v1.class_validators import Validator, make_generic_validator, prep_validators
+from pydantic.v1.error_wrappers import ErrorWrapper
+from pydantic.v1.errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError
+from pydantic.v1.types import Json, JsonWrapper
+from pydantic.v1.typing import (
+    NoArgAnyCallable,
+    convert_generics,
+    display_as_type,
+    get_args,
+    get_origin,
+    is_finalvar,
+    is_literal_type,
+    is_new_type,
+    is_none_type,
+    is_typeddict,
+    is_typeddict_special,
+    is_union,
+    new_type_supertype,
+)
+from pydantic.v1.utils import (
+    PyObjectStr,
+    Representation,
+    ValueItems,
+    get_discriminator_alias_and_values,
+    get_unique_discriminator_alias,
+    lenient_isinstance,
+    lenient_issubclass,
+    sequence_like,
+    smart_deepcopy,
+)
+from pydantic.v1.validators import constant_validator, dict_validator, find_validators, validate_json
+
+Required: Any = Ellipsis
+
+T = TypeVar('T')
+
+
+class UndefinedType:
+    def __repr__(self) -> str:
+        return 'PydanticUndefined'
+
+    def __copy__(self: T) -> T:
+        return self
+
+    def __reduce__(self) -> str:
+        return 'Undefined'
+
+    def __deepcopy__(self: T, _: Any) -> T:
+        return self
+
+
+Undefined = UndefinedType()
+
+if TYPE_CHECKING:
+    from pydantic.v1.class_validators import ValidatorsList
+    from pydantic.v1.config import BaseConfig
+    from pydantic.v1.error_wrappers import ErrorList
+    from pydantic.v1.types import ModelOrDc
+    from pydantic.v1.typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs
+
+    ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]]
+    LocStr = Union[Tuple[Union[int, str], ...], str]
+    BoolUndefined = Union[bool, UndefinedType]
+
+
+class FieldInfo(Representation):
+    """
+    Captures extra information about a field.
+    """
+
+    __slots__ = (
+        'default',
+        'default_factory',
+        'alias',
+        'alias_priority',
+        'title',
+        'description',
+        'exclude',
+        'include',
+        'const',
+        'gt',
+        'ge',
+        'lt',
+        'le',
+        'multiple_of',
+        'allow_inf_nan',
+        'max_digits',
+        'decimal_places',
+        'min_items',
+        'max_items',
+        'unique_items',
+        'min_length',
+        'max_length',
+        'allow_mutation',
+        'repr',
+        'regex',
+        'discriminator',
+        'extra',
+    )
+
+    # field constraints with their default values; also used in update_from_config below
+    __field_constraints__ = {
+        'min_length': None,
+        'max_length': None,
+        'regex': None,
+        'gt': None,
+        'lt': None,
+        'ge': None,
+        'le': None,
+        'multiple_of': None,
+        'allow_inf_nan': None,
+        'max_digits': None,
+        'decimal_places': None,
+        'min_items': None,
+        'max_items': None,
+        'unique_items': None,
+        'allow_mutation': True,
+    }
+
+    def __init__(self, default: Any = Undefined, **kwargs: Any) -> None:
+        self.default = default
+        self.default_factory = kwargs.pop('default_factory', None)
+        self.alias = kwargs.pop('alias', None)
+        self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias is not None else None)
+        self.title = kwargs.pop('title', None)
+        self.description = kwargs.pop('description', None)
+        self.exclude = kwargs.pop('exclude', None)
+        self.include = kwargs.pop('include', None)
+        self.const = kwargs.pop('const', None)
+        self.gt = kwargs.pop('gt', None)
+        self.ge = kwargs.pop('ge', None)
+        self.lt = kwargs.pop('lt', None)
+        self.le = kwargs.pop('le', None)
+        self.multiple_of = kwargs.pop('multiple_of', None)
+        self.allow_inf_nan = kwargs.pop('allow_inf_nan', None)
+        self.max_digits = kwargs.pop('max_digits', None)
+        self.decimal_places = kwargs.pop('decimal_places', None)
+        self.min_items = kwargs.pop('min_items', None)
+        self.max_items = kwargs.pop('max_items', None)
+        self.unique_items = kwargs.pop('unique_items', None)
+        self.min_length = kwargs.pop('min_length', None)
+        self.max_length = kwargs.pop('max_length', None)
+        self.allow_mutation = kwargs.pop('allow_mutation', True)
+        self.regex = kwargs.pop('regex', None)
+        self.discriminator = kwargs.pop('discriminator', None)
+        self.repr = kwargs.pop('repr', True)
+        self.extra = kwargs
+
+    def __repr_args__(self) -> 'ReprArgs':
+        field_defaults_to_hide: Dict[str, Any] = {
+            'repr': True,
+            **self.__field_constraints__,
+        }
+
+        attrs = ((s, getattr(self, s)) for s in self.__slots__)
+        return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)]
+
+    def get_constraints(self) -> Set[str]:
+        """
+        Gets the constraints set on the field by comparing the constraint value with its default value
+
+        :return: the constraints set on field_info
+        """
+        return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default}
+
+    def update_from_config(self, from_config: Dict[str, Any]) -> None:
+        """
+        Update this FieldInfo based on a dict from get_field_info; only fields which have not been set are updated.
+        """
+        for attr_name, value in from_config.items():
+            try:
+                current_value = getattr(self, attr_name)
+            except AttributeError:
+                # attr_name is not an attribute of FieldInfo, it should therefore be added to extra
+                # (except if extra already has this value!)
+                self.extra.setdefault(attr_name, value)
+            else:
+                if current_value is self.__field_constraints__.get(attr_name, None):
+                    setattr(self, attr_name, value)
+                elif attr_name == 'exclude':
+                    self.exclude = ValueItems.merge(value, current_value)
+                elif attr_name == 'include':
+                    self.include = ValueItems.merge(value, current_value, intersect=True)
+
+    def _validate(self) -> None:
+        if self.default is not Undefined and self.default_factory is not None:
+            raise ValueError('cannot specify both default and default_factory')
+
+
+def Field(
+    default: Any = Undefined,
+    *,
+    default_factory: Optional[NoArgAnyCallable] = None,
+    alias: Optional[str] = None,
+    title: Optional[str] = None,
+    description: Optional[str] = None,
+    exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
+    include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
+    const: Optional[bool] = None,
+    gt: Optional[float] = None,
+    ge: Optional[float] = None,
+    lt: Optional[float] = None,
+    le: Optional[float] = None,
+    multiple_of: Optional[float] = None,
+    allow_inf_nan: Optional[bool] = None,
+    max_digits: Optional[int] = None,
+    decimal_places: Optional[int] = None,
+    min_items: Optional[int] = None,
+    max_items: Optional[int] = None,
+    unique_items: Optional[bool] = None,
+    min_length: Optional[int] = None,
+    max_length: Optional[int] = None,
+    allow_mutation: bool = True,
+    regex: Optional[str] = None,
+    discriminator: Optional[str] = None,
+    repr: bool = True,
+    **extra: Any,
+) -> Any:
+    """
+    Used to provide extra information about a field, either for the model schema or complex validation. Some arguments
+    apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``.
+
+    :param default: since this is replacing the field’s default, its first argument is used
+      to set the default; use ellipsis (``...``) to indicate the field is required
+    :param default_factory: callable that will be called when a default value is needed for this field
+      If both `default` and `default_factory` are set, an error is raised.
+    :param alias: the public name of the field
+    :param title: can be any string, used in the schema
+    :param description: can be any string, used in the schema
+    :param exclude: exclude this field while dumping.
+      Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
+    :param include: include this field while dumping.
+      Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
+    :param const: this field is required and *must* take its default value
+    :param gt: only applies to numbers, requires the field to be "greater than". The schema
+      will have an ``exclusiveMinimum`` validation keyword
+    :param ge: only applies to numbers, requires the field to be "greater than or equal to". The
+      schema will have a ``minimum`` validation keyword
+    :param lt: only applies to numbers, requires the field to be "less than". The schema
+      will have an ``exclusiveMaximum`` validation keyword
+    :param le: only applies to numbers, requires the field to be "less than or equal to". The
+      schema will have a ``maximum`` validation keyword
+    :param multiple_of: only applies to numbers, requires the field to be "a multiple of". The
+      schema will have a ``multipleOf`` validation keyword
+    :param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf),
+        which is a valid Python float. Default True, set to False for compatibility with JSON.
+    :param max_digits: only applies to Decimals, requires the field to have a maximum number
+      of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes.
+    :param decimal_places: only applies to Decimals, requires the field to have at most the given number of decimal
+      places. It does not include trailing decimal zeroes.
+    :param min_items: only applies to lists, requires the field to have a minimum number of
+      elements. The schema will have a ``minItems`` validation keyword
+    :param max_items: only applies to lists, requires the field to have a maximum number of
+      elements. The schema will have a ``maxItems`` validation keyword
+    :param unique_items: only applies to lists, requires the field not to have duplicated
+      elements. The schema will have a ``uniqueItems`` validation keyword
+    :param min_length: only applies to strings, requires the field to have a minimum length. The
+      schema will have a ``minLength`` validation keyword
+    :param max_length: only applies to strings, requires the field to have a maximum length. The
+      schema will have a ``maxLength`` validation keyword
+    :param allow_mutation: a boolean which defaults to True. When False, a TypeError is raised if the field is
+      assigned on an instance. The BaseModel Config must set validate_assignment to True
+    :param regex: only applies to strings, requires the field match against a regular expression
+      pattern string. The schema will have a ``pattern`` validation keyword
+    :param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field.
+      The `discriminator` is the name of this common field to shorten validation and improve generated schema
+    :param repr: show this field in the representation
+    :param **extra: any additional keyword arguments will be added as is to the schema
+    """
+    field_info = FieldInfo(
+        default,
+        default_factory=default_factory,
+        alias=alias,
+        title=title,
+        description=description,
+        exclude=exclude,
+        include=include,
+        const=const,
+        gt=gt,
+        ge=ge,
+        lt=lt,
+        le=le,
+        multiple_of=multiple_of,
+        allow_inf_nan=allow_inf_nan,
+        max_digits=max_digits,
+        decimal_places=decimal_places,
+        min_items=min_items,
+        max_items=max_items,
+        unique_items=unique_items,
+        min_length=min_length,
+        max_length=max_length,
+        allow_mutation=allow_mutation,
+        regex=regex,
+        discriminator=discriminator,
+        repr=repr,
+        **extra,
+    )
+    field_info._validate()
+    return field_info
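A brief usage sketch for the Field() constructor documented above (assuming the vendored import path; the model and values are illustrative only, not part of the file):

    from pydantic.v1 import BaseModel, Field

    class Item(BaseModel):
        # '...' marks the field as required; constraints feed the generated schema
        name: str = Field(..., min_length=1, max_length=50, description='display name')
        quantity: int = Field(1, ge=0, le=100)

    print(Item(name='book').dict())                 # {'name': 'book', 'quantity': 1}
    print(Item.schema()['properties']['quantity'])  # includes 'minimum': 0 and 'maximum': 100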
+
+
+# used to be an enum but changed to ints for a small performance improvement (less access overhead)
+SHAPE_SINGLETON = 1
+SHAPE_LIST = 2
+SHAPE_SET = 3
+SHAPE_MAPPING = 4
+SHAPE_TUPLE = 5
+SHAPE_TUPLE_ELLIPSIS = 6
+SHAPE_SEQUENCE = 7
+SHAPE_FROZENSET = 8
+SHAPE_ITERABLE = 9
+SHAPE_GENERIC = 10
+SHAPE_DEQUE = 11
+SHAPE_DICT = 12
+SHAPE_DEFAULTDICT = 13
+SHAPE_COUNTER = 14
+SHAPE_NAME_LOOKUP = {
+    SHAPE_LIST: 'List[{}]',
+    SHAPE_SET: 'Set[{}]',
+    SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]',
+    SHAPE_SEQUENCE: 'Sequence[{}]',
+    SHAPE_FROZENSET: 'FrozenSet[{}]',
+    SHAPE_ITERABLE: 'Iterable[{}]',
+    SHAPE_DEQUE: 'Deque[{}]',
+    SHAPE_DICT: 'Dict[{}]',
+    SHAPE_DEFAULTDICT: 'DefaultDict[{}]',
+    SHAPE_COUNTER: 'Counter[{}]',
+}
+
+MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER}
+
+
+class ModelField(Representation):
+    __slots__ = (
+        'type_',
+        'outer_type_',
+        'annotation',
+        'sub_fields',
+        'sub_fields_mapping',
+        'key_field',
+        'validators',
+        'pre_validators',
+        'post_validators',
+        'default',
+        'default_factory',
+        'required',
+        'final',
+        'model_config',
+        'name',
+        'alias',
+        'has_alias',
+        'field_info',
+        'discriminator_key',
+        'discriminator_alias',
+        'validate_always',
+        'allow_none',
+        'shape',
+        'class_validators',
+        'parse_json',
+    )
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        type_: Type[Any],
+        class_validators: Optional[Dict[str, Validator]],
+        model_config: Type['BaseConfig'],
+        default: Any = None,
+        default_factory: Optional[NoArgAnyCallable] = None,
+        required: 'BoolUndefined' = Undefined,
+        final: bool = False,
+        alias: Optional[str] = None,
+        field_info: Optional[FieldInfo] = None,
+    ) -> None:
+        self.name: str = name
+        self.has_alias: bool = alias is not None
+        self.alias: str = alias if alias is not None else name
+        self.annotation = type_
+        self.type_: Any = convert_generics(type_)
+        self.outer_type_: Any = type_
+        self.class_validators = class_validators or {}
+        self.default: Any = default
+        self.default_factory: Optional[NoArgAnyCallable] = default_factory
+        self.required: 'BoolUndefined' = required
+        self.final: bool = final
+        self.model_config = model_config
+        self.field_info: FieldInfo = field_info or FieldInfo(default)
+        self.discriminator_key: Optional[str] = self.field_info.discriminator
+        self.discriminator_alias: Optional[str] = self.discriminator_key
+
+        self.allow_none: bool = False
+        self.validate_always: bool = False
+        self.sub_fields: Optional[List[ModelField]] = None
+        self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None  # used for discriminated union
+        self.key_field: Optional[ModelField] = None
+        self.validators: 'ValidatorsList' = []
+        self.pre_validators: Optional['ValidatorsList'] = None
+        self.post_validators: Optional['ValidatorsList'] = None
+        self.parse_json: bool = False
+        self.shape: int = SHAPE_SINGLETON
+        self.model_config.prepare_field(self)
+        self.prepare()
+
+    def get_default(self) -> Any:
+        return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
+
+    @staticmethod
+    def _get_field_info(
+        field_name: str, annotation: Any, value: Any, config: Type['BaseConfig']
+    ) -> Tuple[FieldInfo, Any]:
+        """
+        Get a FieldInfo from a root typing.Annotated annotation, value, or config default.
+
+        The FieldInfo may be set in typing.Annotated or the value, but not both. If neither contains
+        a FieldInfo, a new one will be created using the config.
+
+        :param field_name: name of the field for use in error messages
+        :param annotation: a type hint such as `str` or `Annotated[str, Field(..., min_length=5)]`
+        :param value: the field's assigned value
+        :param config: the model's config object
+        :return: the FieldInfo contained in the `annotation`, the value, or a new one from the config.
+        """
+        field_info_from_config = config.get_field_info(field_name)
+
+        field_info = None
+        if get_origin(annotation) is Annotated:
+            field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)]
+            if len(field_infos) > 1:
+                raise ValueError(f'cannot specify multiple `Annotated` `Field`s for {field_name!r}')
+            field_info = next(iter(field_infos), None)
+            if field_info is not None:
+                field_info = copy.copy(field_info)
+                field_info.update_from_config(field_info_from_config)
+                if field_info.default not in (Undefined, Required):
+                    raise ValueError(f'`Field` default cannot be set in `Annotated` for {field_name!r}')
+                if value is not Undefined and value is not Required:
+                    # check also `Required` because of `validate_arguments` that sets `...` as default value
+                    field_info.default = value
+
+        if isinstance(value, FieldInfo):
+            if field_info is not None:
+                raise ValueError(f'cannot specify `Annotated` and value `Field`s together for {field_name!r}')
+            field_info = value
+            field_info.update_from_config(field_info_from_config)
+        elif field_info is None:
+            field_info = FieldInfo(value, **field_info_from_config)
+        value = None if field_info.default_factory is not None else field_info.default
+        field_info._validate()
+        return field_info, value
+
+    @classmethod
+    def infer(
+        cls,
+        *,
+        name: str,
+        value: Any,
+        annotation: Any,
+        class_validators: Optional[Dict[str, Validator]],
+        config: Type['BaseConfig'],
+    ) -> 'ModelField':
+        from pydantic.v1.schema import get_annotation_from_field_info
+
+        field_info, value = cls._get_field_info(name, annotation, value, config)
+        required: 'BoolUndefined' = Undefined
+        if value is Required:
+            required = True
+            value = None
+        elif value is not Undefined:
+            required = False
+        annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment)
+
+        return cls(
+            name=name,
+            type_=annotation,
+            alias=field_info.alias,
+            class_validators=class_validators,
+            default=value,
+            default_factory=field_info.default_factory,
+            required=required,
+            model_config=config,
+            field_info=field_info,
+        )
+
+    def set_config(self, config: Type['BaseConfig']) -> None:
+        self.model_config = config
+        info_from_config = config.get_field_info(self.name)
+        config.prepare_field(self)
+        new_alias = info_from_config.get('alias')
+        new_alias_priority = info_from_config.get('alias_priority') or 0
+        if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0):
+            self.field_info.alias = new_alias
+            self.field_info.alias_priority = new_alias_priority
+            self.alias = new_alias
+        new_exclude = info_from_config.get('exclude')
+        if new_exclude is not None:
+            self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude)
+        new_include = info_from_config.get('include')
+        if new_include is not None:
+            self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True)
+
+    @property
+    def alt_alias(self) -> bool:
+        return self.name != self.alias
+
+    def prepare(self) -> None:
+        """
+        Prepare the field by inspecting self.default, self.type_ etc.
+
+        Note: this method is **not** idempotent (because _type_analysis is not idempotent),
+        e.g. calling it multiple times may modify the field and configure it incorrectly.
+        """
+        self._set_default_and_type()
+        if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType:
+            # self.type_ is currently a ForwardRef and there's nothing we can do now,
+            # user will need to call model.update_forward_refs()
+            return
+
+        self._type_analysis()
+        if self.required is Undefined:
+            self.required = True
+        if self.default is Undefined and self.default_factory is None:
+            self.default = None
+        self.populate_validators()
+
+    def _set_default_and_type(self) -> None:
+        """
+        Set the default value, infer the type if needed and check if `None` value is valid.
+        """
+        if self.default_factory is not None:
+            if self.type_ is Undefined:
+                raise errors_.ConfigError(
+                    f'you need to set the type of field {self.name!r} when using `default_factory`'
+                )
+            return
+
+        default_value = self.get_default()
+
+        if default_value is not None and self.type_ is Undefined:
+            self.type_ = default_value.__class__
+            self.outer_type_ = self.type_
+            self.annotation = self.type_
+
+        if self.type_ is Undefined:
+            raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"')
+
+        if self.required is False and default_value is None:
+            self.allow_none = True
+
+    def _type_analysis(self) -> None:  # noqa: C901 (ignore complexity)
+        # typing interface is horrible, we have to do some ugly checks
+        if lenient_issubclass(self.type_, JsonWrapper):
+            self.type_ = self.type_.inner_type
+            self.parse_json = True
+        elif lenient_issubclass(self.type_, Json):
+            self.type_ = Any
+            self.parse_json = True
+        elif isinstance(self.type_, TypeVar):
+            if self.type_.__bound__:
+                self.type_ = self.type_.__bound__
+            elif self.type_.__constraints__:
+                self.type_ = Union[self.type_.__constraints__]
+            else:
+                self.type_ = Any
+        elif is_new_type(self.type_):
+            self.type_ = new_type_supertype(self.type_)
+
+        if self.type_ is Any or self.type_ is object:
+            if self.required is Undefined:
+                self.required = False
+            self.allow_none = True
+            return
+        elif self.type_ is Pattern or self.type_ is re.Pattern:
+            # python 3.7 only, Pattern is a typing object but without sub fields
+            return
+        elif is_literal_type(self.type_):
+            return
+        elif is_typeddict(self.type_):
+            return
+
+        if is_finalvar(self.type_):
+            self.final = True
+
+            if self.type_ is Final:
+                self.type_ = Any
+            else:
+                self.type_ = get_args(self.type_)[0]
+
+            self._type_analysis()
+            return
+
+        origin = get_origin(self.type_)
+
+        if origin is Annotated or is_typeddict_special(origin):
+            self.type_ = get_args(self.type_)[0]
+            self._type_analysis()
+            return
+
+        if self.discriminator_key is not None and not is_union(origin):
+            raise TypeError('`discriminator` can only be used with `Union` type with more than one variant')
+
+        # add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None`
+        if origin is None or origin is CollectionsHashable:
+            # field is not a "typing" object, e.g. Union, Dict, List etc.
+            # allow None for virtual superclasses of NoneType, e.g. Hashable
+            if isinstance(self.type_, type) and isinstance(None, self.type_):
+                self.allow_none = True
+            return
+        elif origin is Callable:
+            return
+        elif is_union(origin):
+            types_ = []
+            for type_ in get_args(self.type_):
+                if is_none_type(type_) or type_ is Any or type_ is object:
+                    if self.required is Undefined:
+                        self.required = False
+                    self.allow_none = True
+                if is_none_type(type_):
+                    continue
+                types_.append(type_)
+
+            if len(types_) == 1:
+                # Optional[]
+                self.type_ = types_[0]
+                # this is the one case where the "outer type" isn't just the original type
+                self.outer_type_ = self.type_
+                # re-run to correctly interpret the new self.type_
+                self._type_analysis()
+            else:
+                self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_]
+
+                if self.discriminator_key is not None:
+                    self.prepare_discriminated_union_sub_fields()
+            return
+        elif issubclass(origin, Tuple):  # type: ignore
+            # origin == Tuple without item type
+            args = get_args(self.type_)
+            if not args:  # plain tuple
+                self.type_ = Any
+                self.shape = SHAPE_TUPLE_ELLIPSIS
+            elif len(args) == 2 and args[1] is Ellipsis:  # e.g. Tuple[int, ...]
+                self.type_ = args[0]
+                self.shape = SHAPE_TUPLE_ELLIPSIS
+                self.sub_fields = [self._create_sub_type(args[0], f'{self.name}_0')]
+            elif args == ((),):  # Tuple[()] means empty tuple
+                self.shape = SHAPE_TUPLE
+                self.type_ = Any
+                self.sub_fields = []
+            else:
+                self.shape = SHAPE_TUPLE
+                self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(args)]
+            return
+        elif issubclass(origin, List):
+            # Create self validators
+            get_validators = getattr(self.type_, '__get_validators__', None)
+            if get_validators:
+                self.class_validators.update(
+                    {f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
+                )
+
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_LIST
+        elif issubclass(origin, Set):
+            # Create self validators
+            get_validators = getattr(self.type_, '__get_validators__', None)
+            if get_validators:
+                self.class_validators.update(
+                    {f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
+                )
+
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_SET
+        elif issubclass(origin, FrozenSet):
+            # Create self validators
+            get_validators = getattr(self.type_, '__get_validators__', None)
+            if get_validators:
+                self.class_validators.update(
+                    {f'frozenset_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
+                )
+
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_FROZENSET
+        elif issubclass(origin, Deque):
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_DEQUE
+        elif issubclass(origin, Sequence):
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_SEQUENCE
+        # priority to most common mapping: dict
+        elif origin is dict or origin is Dict:
+            self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
+            self.type_ = get_args(self.type_)[1]
+            self.shape = SHAPE_DICT
+        elif issubclass(origin, DefaultDict):
+            self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
+            self.type_ = get_args(self.type_)[1]
+            self.shape = SHAPE_DEFAULTDICT
+        elif issubclass(origin, Counter):
+            self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
+            self.type_ = int
+            self.shape = SHAPE_COUNTER
+        elif issubclass(origin, Mapping):
+            self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
+            self.type_ = get_args(self.type_)[1]
+            self.shape = SHAPE_MAPPING
+        # Equality check as almost everything inherits from Iterable, including str
+        # check for Iterable and CollectionsIterable, as it could receive one even when declared with the other
+        elif origin in {Iterable, CollectionsIterable}:
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_ITERABLE
+            self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')]
+        elif issubclass(origin, Type):  # type: ignore
+            return
+        elif hasattr(origin, '__get_validators__') or self.model_config.arbitrary_types_allowed:
+            # Is a Pydantic-compatible generic that handles itself
+            # or we have arbitrary_types_allowed = True
+            self.shape = SHAPE_GENERIC
+            self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))]
+            self.type_ = origin
+            return
+        else:
+            raise TypeError(f'Fields of type "{origin}" are not supported.')
+
+        # type_ has been refined eg. as the type of a List and sub_fields needs to be populated
+        self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)]
+
+    def prepare_discriminated_union_sub_fields(self) -> None:
+        """
+        Prepare the mapping <discriminator key> -> <ModelField> and update `sub_fields`
+        Note that this process can be aborted if a `ForwardRef` is encountered
+        """
+        assert self.discriminator_key is not None
+
+        if self.type_.__class__ is DeferredType:
+            return
+
+        assert self.sub_fields is not None
+        sub_fields_mapping: Dict[str, 'ModelField'] = {}
+        all_aliases: Set[str] = set()
+
+        for sub_field in self.sub_fields:
+            t = sub_field.type_
+            if t.__class__ is ForwardRef:
+                # Stopping everything...will need to call `update_forward_refs`
+                return
+
+            alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key)
+            all_aliases.add(alias)
+            for discriminator_value in discriminator_values:
+                sub_fields_mapping[discriminator_value] = sub_field
+
+        self.sub_fields_mapping = sub_fields_mapping
+        self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key)
+
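+    # Illustrative sketch (an assumption, not part of the upstream source): the
+    # discriminator machinery above backs tagged unions declared roughly like
+    #
+    #     class Cat(BaseModel):
+    #         pet_type: Literal['cat']
+    #
+    #     class Dog(BaseModel):
+    #         pet_type: Literal['dog']
+    #
+    #     class Owner(BaseModel):
+    #         pet: Union[Cat, Dog] = Field(..., discriminator='pet_type')
+    #
+    # where `sub_fields_mapping` would map 'cat' to the Cat sub-field and 'dog'
+    # to the Dog sub-field; the class and field names here are hypothetical.
+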
+    def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> 'ModelField':
+        if for_keys:
+            class_validators = None
+        else:
+            # validators for sub items should not have `each_item` as we want to check only the first sublevel
+            class_validators = {
+                k: Validator(
+                    func=v.func,
+                    pre=v.pre,
+                    each_item=False,
+                    always=v.always,
+                    check_fields=v.check_fields,
+                    skip_on_failure=v.skip_on_failure,
+                )
+                for k, v in self.class_validators.items()
+                if v.each_item
+            }
+
+        field_info, _ = self._get_field_info(name, type_, None, self.model_config)
+
+        return self.__class__(
+            type_=type_,
+            name=name,
+            class_validators=class_validators,
+            model_config=self.model_config,
+            field_info=field_info,
+        )
+
+    def populate_validators(self) -> None:
+        """
+        Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s  __get_validators__
+        and class validators. This method should be idempotent, e.g. it should be safe to call multiple times
+        without mis-configuring the field.
+        """
+        self.validate_always = getattr(self.type_, 'validate_always', False) or any(
+            v.always for v in self.class_validators.values()
+        )
+
+        class_validators_ = self.class_validators.values()
+        if not self.sub_fields or self.shape == SHAPE_GENERIC:
+            get_validators = getattr(self.type_, '__get_validators__', None)
+            v_funcs = (
+                *[v.func for v in class_validators_ if v.each_item and v.pre],
+                *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))),
+                *[v.func for v in class_validators_ if v.each_item and not v.pre],
+            )
+            self.validators = prep_validators(v_funcs)
+
+        self.pre_validators = []
+        self.post_validators = []
+
+        if self.field_info and self.field_info.const:
+            self.post_validators.append(make_generic_validator(constant_validator))
+
+        if class_validators_:
+            self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre)
+            self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre)
+
+        if self.parse_json:
+            self.pre_validators.append(make_generic_validator(validate_json))
+
+        self.pre_validators = self.pre_validators or None
+        self.post_validators = self.post_validators or None
+
+    def validate(
+        self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None
+    ) -> 'ValidateReturn':
+        assert self.type_.__class__ is not DeferredType
+
+        if self.type_.__class__ is ForwardRef:
+            assert cls is not None
+            raise ConfigError(
+                f'field "{self.name}" not yet prepared so type is still a ForwardRef, '
+                f'you might need to call {cls.__name__}.update_forward_refs().'
+            )
+
+        errors: Optional['ErrorList']
+        if self.pre_validators:
+            v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators)
+            if errors:
+                return v, errors
+
+        if v is None:
+            if is_none_type(self.type_):
+                # keep validating
+                pass
+            elif self.allow_none:
+                if self.post_validators:
+                    return self._apply_validators(v, values, loc, cls, self.post_validators)
+                else:
+                    return None, None
+            else:
+                return v, ErrorWrapper(NoneIsNotAllowedError(), loc)
+
+        if self.shape == SHAPE_SINGLETON:
+            v, errors = self._validate_singleton(v, values, loc, cls)
+        elif self.shape in MAPPING_LIKE_SHAPES:
+            v, errors = self._validate_mapping_like(v, values, loc, cls)
+        elif self.shape == SHAPE_TUPLE:
+            v, errors = self._validate_tuple(v, values, loc, cls)
+        elif self.shape == SHAPE_ITERABLE:
+            v, errors = self._validate_iterable(v, values, loc, cls)
+        elif self.shape == SHAPE_GENERIC:
+            v, errors = self._apply_validators(v, values, loc, cls, self.validators)
+        else:
+            #  sequence, list, set, generator, tuple with ellipsis, frozen set
+            v, errors = self._validate_sequence_like(v, values, loc, cls)
+
+        if not errors and self.post_validators:
+            v, errors = self._apply_validators(v, values, loc, cls, self.post_validators)
+        return v, errors
+
+    def _validate_sequence_like(  # noqa: C901 (ignore complexity)
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        """
+        Validate sequence-like containers: lists, tuples, sets and generators
+        Note that large if-else blocks are necessary to enable Cython
+        optimization, which is why we disable the complexity check above.
+        """
+        if not sequence_like(v):
+            e: errors_.PydanticTypeError
+            if self.shape == SHAPE_LIST:
+                e = errors_.ListError()
+            elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS):
+                e = errors_.TupleError()
+            elif self.shape == SHAPE_SET:
+                e = errors_.SetError()
+            elif self.shape == SHAPE_FROZENSET:
+                e = errors_.FrozenSetError()
+            else:
+                e = errors_.SequenceError()
+            return v, ErrorWrapper(e, loc)
+
+        loc = loc if isinstance(loc, tuple) else (loc,)
+        result = []
+        errors: List[ErrorList] = []
+        for i, v_ in enumerate(v):
+            v_loc = *loc, i
+            r, ee = self._validate_singleton(v_, values, v_loc, cls)
+            if ee:
+                errors.append(ee)
+            else:
+                result.append(r)
+
+        if errors:
+            return v, errors
+
+        converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result
+
+        if self.shape == SHAPE_SET:
+            converted = set(result)
+        elif self.shape == SHAPE_FROZENSET:
+            converted = frozenset(result)
+        elif self.shape == SHAPE_TUPLE_ELLIPSIS:
+            converted = tuple(result)
+        elif self.shape == SHAPE_DEQUE:
+            converted = deque(result, maxlen=getattr(v, 'maxlen', None))
+        elif self.shape == SHAPE_SEQUENCE:
+            if isinstance(v, tuple):
+                converted = tuple(result)
+            elif isinstance(v, set):
+                converted = set(result)
+            elif isinstance(v, Generator):
+                converted = iter(result)
+            elif isinstance(v, deque):
+                converted = deque(result, maxlen=getattr(v, 'maxlen', None))
+        return converted, None
+
+    def _validate_iterable(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        """
+        Validate Iterables.
+
+        This intentionally doesn't validate values to allow infinite generators.
+        """
+
+        try:
+            iterable = iter(v)
+        except TypeError:
+            return v, ErrorWrapper(errors_.IterableError(), loc)
+        return iterable, None
+
+    def _validate_tuple(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        e: Optional[Exception] = None
+        if not sequence_like(v):
+            e = errors_.TupleError()
+        else:
+            actual_length, expected_length = len(v), len(self.sub_fields)  # type: ignore
+            if actual_length != expected_length:
+                e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length)
+
+        if e:
+            return v, ErrorWrapper(e, loc)
+
+        loc = loc if isinstance(loc, tuple) else (loc,)
+        result = []
+        errors: List[ErrorList] = []
+        for i, (v_, field) in enumerate(zip(v, self.sub_fields)):  # type: ignore
+            v_loc = *loc, i
+            r, ee = field.validate(v_, values, loc=v_loc, cls=cls)
+            if ee:
+                errors.append(ee)
+            else:
+                result.append(r)
+
+        if errors:
+            return v, errors
+        else:
+            return tuple(result), None
+
+    def _validate_mapping_like(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        try:
+            v_iter = dict_validator(v)
+        except TypeError as exc:
+            return v, ErrorWrapper(exc, loc)
+
+        loc = loc if isinstance(loc, tuple) else (loc,)
+        result, errors = {}, []
+        for k, v_ in v_iter.items():
+            v_loc = *loc, '__key__'
+            key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls)  # type: ignore
+            if key_errors:
+                errors.append(key_errors)
+                continue
+
+            v_loc = *loc, k
+            value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls)
+            if value_errors:
+                errors.append(value_errors)
+                continue
+
+            result[key_result] = value_result
+        if errors:
+            return v, errors
+        elif self.shape == SHAPE_DICT:
+            return result, None
+        elif self.shape == SHAPE_DEFAULTDICT:
+            return defaultdict(self.type_, result), None
+        elif self.shape == SHAPE_COUNTER:
+            return CollectionCounter(result), None
+        else:
+            return self._get_mapping_value(v, result), None
+
+    def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]:
+        """
+        When type is `Mapping[KT, KV]` (or another unsupported mapping), we try to avoid
+        coercing to `dict` unwillingly.
+        """
+        original_cls = original.__class__
+
+        if original_cls == dict or original_cls == Dict:
+            return converted
+        elif original_cls in {defaultdict, DefaultDict}:
+            return defaultdict(self.type_, converted)
+        else:
+            try:
+                # Counter, OrderedDict, UserDict, ...
+                return original_cls(converted)  # type: ignore
+            except TypeError:
+                raise RuntimeError(f'Could not convert dictionary to {original_cls.__name__!r}') from None
+
+    def _validate_singleton(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        if self.sub_fields:
+            if self.discriminator_key is not None:
+                return self._validate_discriminated_union(v, values, loc, cls)
+
+            errors = []
+
+            if self.model_config.smart_union and is_union(get_origin(self.type_)):
+                # 1st pass: check if the value is an exact instance of one of the Union types
+                # (e.g. to avoid coercing a bool into an int)
+                for field in self.sub_fields:
+                    if v.__class__ is field.outer_type_:
+                        return v, None
+
+                # 2nd pass: check if the value is an instance of any subclass of the Union types
+                for field in self.sub_fields:
+                    # This whole logic will be improved later on to support more complex `isinstance` checks
+                    # It will probably be done once a strict mode is added and be something like:
+                    # ```
+                    #     value, error = field.validate(v, values, strict=True)
+                    #     if error is None:
+                    #         return value, None
+                    # ```
+                    try:
+                        if isinstance(v, field.outer_type_):
+                            return v, None
+                    except TypeError:
+                        # compound type
+                        if lenient_isinstance(v, get_origin(field.outer_type_)):
+                            value, error = field.validate(v, values, loc=loc, cls=cls)
+                            if not error:
+                                return value, None
+
+            # 1st pass by default or 3rd pass with `smart_union` enabled:
+            # check if the value can be coerced into one of the Union types
+            for field in self.sub_fields:
+                value, error = field.validate(v, values, loc=loc, cls=cls)
+                if error:
+                    errors.append(error)
+                else:
+                    return value, None
+            return v, errors
+        else:
+            return self._apply_validators(v, values, loc, cls, self.validators)
+
+    def _validate_discriminated_union(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        assert self.discriminator_key is not None
+        assert self.discriminator_alias is not None
+
+        try:
+            try:
+                discriminator_value = v[self.discriminator_alias]
+            except KeyError:
+                if self.model_config.allow_population_by_field_name:
+                    discriminator_value = v[self.discriminator_key]
+                else:
+                    raise
+        except KeyError:
+            return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)
+        except TypeError:
+            try:
+                # BaseModel or dataclass
+                discriminator_value = getattr(v, self.discriminator_key)
+            except (AttributeError, TypeError):
+                return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)
+
+        if self.sub_fields_mapping is None:
+            assert cls is not None
+            raise ConfigError(
+                f'field "{self.name}" not yet prepared so type is still a ForwardRef, '
+                f'you might need to call {cls.__name__}.update_forward_refs().'
+            )
+
+        try:
+            sub_field = self.sub_fields_mapping[discriminator_value]
+        except (KeyError, TypeError):
+            # KeyError: `discriminator_value` is not in the dictionary.
+            # TypeError: `discriminator_value` is unhashable.
+            assert self.sub_fields_mapping is not None
+            return v, ErrorWrapper(
+                InvalidDiscriminator(
+                    discriminator_key=self.discriminator_key,
+                    discriminator_value=discriminator_value,
+                    allowed_values=list(self.sub_fields_mapping),
+                ),
+                loc,
+            )
+        else:
+            if not isinstance(loc, tuple):
+                loc = (loc,)
+            return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls)
+
+    def _apply_validators(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList'
+    ) -> 'ValidateReturn':
+        for validator in validators:
+            try:
+                v = validator(cls, v, values, self, self.model_config)
+            except (ValueError, TypeError, AssertionError) as exc:
+                return v, ErrorWrapper(exc, loc)
+        return v, None
+
+    def is_complex(self) -> bool:
+        """
+        Whether the field is "complex", e.g. whether env variables for it should be parsed as JSON.
+        """
+        from pydantic.v1.main import BaseModel
+
+        return (
+            self.shape != SHAPE_SINGLETON
+            or hasattr(self.type_, '__pydantic_model__')
+            or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict))
+        )
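+
+    # Illustrative note (an assumption, not from the upstream file): "complex" here is
+    # what BaseSettings uses to decide whether an environment variable should be parsed
+    # as JSON, e.g. a field `numbers: List[int]` could be populated from an env var set
+    # to '[1, 2, 3]'.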
+
+    def _type_display(self) -> PyObjectStr:
+        t = display_as_type(self.type_)
+
+        if self.shape in MAPPING_LIKE_SHAPES:
+            t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]'  # type: ignore
+        elif self.shape == SHAPE_TUPLE:
+            t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields))  # type: ignore
+        elif self.shape == SHAPE_GENERIC:
+            assert self.sub_fields
+            t = '{}[{}]'.format(
+                display_as_type(self.type_), ', '.join(display_as_type(f.type_) for f in self.sub_fields)
+            )
+        elif self.shape != SHAPE_SINGLETON:
+            t = SHAPE_NAME_LOOKUP[self.shape].format(t)
+
+        if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields):
+            t = f'Optional[{t}]'
+        return PyObjectStr(t)
+
+    def __repr_args__(self) -> 'ReprArgs':
+        args = [('name', self.name), ('type', self._type_display()), ('required', self.required)]
+
+        if not self.required:
+            if self.default_factory is not None:
+                args.append(('default_factory', f'<function {self.default_factory.__name__}>'))
+            else:
+                args.append(('default', self.default))
+
+        if self.alt_alias:
+            args.append(('alias', self.alias))
+        return args
+
+
+class ModelPrivateAttr(Representation):
+    __slots__ = ('default', 'default_factory')
+
+    def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None:
+        self.default = default
+        self.default_factory = default_factory
+
+    def get_default(self) -> Any:
+        return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
+
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and (self.default, self.default_factory) == (
+            other.default,
+            other.default_factory,
+        )
+
+
+def PrivateAttr(
+    default: Any = Undefined,
+    *,
+    default_factory: Optional[NoArgAnyCallable] = None,
+) -> Any:
+    """
+    Indicates that an attribute is only used internally and never mixed with regular fields.
+
+    Types or values of private attrs are not checked by pydantic and it's up to you to keep them relevant.
+
+    Private attrs are stored in model __slots__.
+
+    :param default: the attribute’s default value
+    :param default_factory: callable that will be called when a default value is needed for this attribute.
+      If both `default` and `default_factory` are set, an error is raised.
+    """
+    if default is not Undefined and default_factory is not None:
+        raise ValueError('cannot specify both default and default_factory')
+
+    return ModelPrivateAttr(
+        default,
+        default_factory=default_factory,
+    )
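+
+
+# Illustrative sketch (an assumption, not part of the upstream source): a private
+# attribute is declared on a model with a leading underscore and is neither
+# validated nor included in `.dict()` / `.json()` output, e.g.
+#
+#     class TimeAwareModel(BaseModel):
+#         _processed_at: datetime = PrivateAttr(default_factory=datetime.utcnow)
+#
+# `TimeAwareModel`, `datetime` and the field name here are hypothetical.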
+
+
+class DeferredType:
+    """
+    Used to postpone field preparation, while creating recursive generic models.
+    """
+
+
+def is_finalvar_with_default_val(type_: Type[Any], val: Any) -> bool:
+    return is_finalvar(type_) and val is not Undefined and not isinstance(val, FieldInfo)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/generics.py b/.venv/lib/python3.12/site-packages/pydantic/v1/generics.py
new file mode 100644
index 00000000..9a69f2b3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/generics.py
@@ -0,0 +1,400 @@
+import sys
+import types
+import typing
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    ClassVar,
+    Dict,
+    ForwardRef,
+    Generic,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
+from weakref import WeakKeyDictionary, WeakValueDictionary
+
+from typing_extensions import Annotated, Literal as ExtLiteral
+
+from pydantic.v1.class_validators import gather_all_validators
+from pydantic.v1.fields import DeferredType
+from pydantic.v1.main import BaseModel, create_model
+from pydantic.v1.types import JsonWrapper
+from pydantic.v1.typing import display_as_type, get_all_type_hints, get_args, get_origin, typing_base
+from pydantic.v1.utils import all_identical, lenient_issubclass
+
+if sys.version_info >= (3, 10):
+    from typing import _UnionGenericAlias
+if sys.version_info >= (3, 8):
+    from typing import Literal
+
+GenericModelT = TypeVar('GenericModelT', bound='GenericModel')
+TypeVarType = Any  # since mypy doesn't allow the use of TypeVar as a type
+
+CacheKey = Tuple[Type[Any], Any, Tuple[Any, ...]]
+Parametrization = Mapping[TypeVarType, Type[Any]]
+
+# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected
+# once they are no longer referenced by the caller.
+if sys.version_info >= (3, 9):  # Typing for weak dictionaries available at 3.9
+    GenericTypesCache = WeakValueDictionary[CacheKey, Type[BaseModel]]
+    AssignedParameters = WeakKeyDictionary[Type[BaseModel], Parametrization]
+else:
+    GenericTypesCache = WeakValueDictionary
+    AssignedParameters = WeakKeyDictionary
+
+# _generic_types_cache is a Mapping from __class_getitem__ arguments to the parametrized version of generic models.
+# This ensures multiple calls of e.g. A[B] return always the same class.
+_generic_types_cache = GenericTypesCache()
+
+# _assigned_parameters is a Mapping from parametrized version of generic models to assigned types of parametrizations
+# as captured during construction of the class (not instances).
+# E.g., for generic model `Model[A, B]`, when parametrized model `Model[int, str]` is created,
+# `Model[int, str]`: {A: int, B: str}` will be stored in `_assigned_parameters`.
+# (This information is only otherwise available after creation from the class name string).
+_assigned_parameters = AssignedParameters()
+
+
+class GenericModel(BaseModel):
+    __slots__ = ()
+    __concrete__: ClassVar[bool] = False
+
+    if TYPE_CHECKING:
+        # Putting this in a TYPE_CHECKING block allows us to replace `if Generic not in cls.__bases__` with
+        # `not hasattr(cls, "__parameters__")`. This means we don't need to force non-concrete subclasses of
+        # `GenericModel` to also inherit from `Generic`, which would require changes to the use of `create_model` below.
+        __parameters__: ClassVar[Tuple[TypeVarType, ...]]
+
+    # Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings
+    def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]:
+        """Instantiates a new class from a generic class `cls` and type variables `params`.
+
+        :param params: Tuple of types the class is parameterized with. Given a generic class
+            `Model` with 2 type variables and a concrete model `Model[str, int]`,
+            the value `(str, int)` would be passed to `params`.
+        :return: New model class inheriting from `cls` with instantiated
+            types described by `params`. If no parameters are given, `cls` is
+            returned as is.
+
+        """
+
+        def _cache_key(_params: Any) -> CacheKey:
+            args = get_args(_params)
+            # python returns a list for Callables, which is not hashable
+            if len(args) == 2 and isinstance(args[0], list):
+                args = (tuple(args[0]), args[1])
+            return cls, _params, args
+
+        cached = _generic_types_cache.get(_cache_key(params))
+        if cached is not None:
+            return cached
+        if cls.__concrete__ and Generic not in cls.__bases__:
+            raise TypeError('Cannot parameterize a concrete instantiation of a generic model')
+        if not isinstance(params, tuple):
+            params = (params,)
+        if cls is GenericModel and any(isinstance(param, TypeVar) for param in params):
+            raise TypeError('Type parameters should be placed on typing.Generic, not GenericModel')
+        if not hasattr(cls, '__parameters__'):
+            raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized')
+
+        check_parameters_count(cls, params)
+        # Build map from generic typevars to passed params
+        typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params))
+        if all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
+            return cls  # if arguments are equal to parameters it's the same object
+
+        # Create new model with original model as parent inserting fields with DeferredType.
+        model_name = cls.__concrete_name__(params)
+        validators = gather_all_validators(cls)
+
+        type_hints = get_all_type_hints(cls).items()
+        instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar}
+
+        fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in instance_type_hints if k in cls.__fields__}
+
+        model_module, called_globally = get_caller_frame_info()
+        created_model = cast(
+            Type[GenericModel],  # casting ensures mypy is aware of the __concrete__ and __parameters__ attributes
+            create_model(
+                model_name,
+                __module__=model_module or cls.__module__,
+                __base__=(cls,) + tuple(cls.__parameterized_bases__(typevars_map)),
+                __config__=None,
+                __validators__=validators,
+                __cls_kwargs__=None,
+                **fields,
+            ),
+        )
+
+        _assigned_parameters[created_model] = typevars_map
+
+        if called_globally:  # create global reference and therefore allow pickling
+            object_by_reference = None
+            reference_name = model_name
+            reference_module_globals = sys.modules[created_model.__module__].__dict__
+            while object_by_reference is not created_model:
+                object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
+                reference_name += '_'
+
+        created_model.Config = cls.Config
+
+        # Find any typevars that are still present in the model.
+        # If none are left, the model is fully "concrete", otherwise the new
+        # class is a generic class as well taking the found typevars as
+        # parameters.
+        new_params = tuple(
+            {param: None for param in iter_contained_typevars(typevars_map.values())}
+        )  # use dict as ordered set
+        created_model.__concrete__ = not new_params
+        if new_params:
+            created_model.__parameters__ = new_params
+
+        # Save created model in cache so we don't end up creating duplicate
+        # models that should be identical.
+        _generic_types_cache[_cache_key(params)] = created_model
+        if len(params) == 1:
+            _generic_types_cache[_cache_key(params[0])] = created_model
+
+        # Recursively walk class type hints and replace generic typevars
+        # with concrete types that were passed.
+        _prepare_model_fields(created_model, fields, instance_type_hints, typevars_map)
+
+        return created_model
+
+    @classmethod
+    def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str:
+        """Compute class name for child classes.
+
+        :param params: Tuple of types the class is parameterized with. Given a generic class
+            `Model` with 2 type variables and a concrete model `Model[str, int]`,
+            the value `(str, int)` would be passed to `params`.
+        :return: String representing the new class where `params` are
+            passed to `cls` as type variables.
+
+        This method can be overridden to achieve a custom naming scheme for GenericModels.
+        """
+        param_names = [display_as_type(param) for param in params]
+        params_component = ', '.join(param_names)
+        return f'{cls.__name__}[{params_component}]'
+
+    @classmethod
+    def __parameterized_bases__(cls, typevars_map: Parametrization) -> Iterator[Type[Any]]:
+        """
+        Returns unbound bases of cls parameterised to given type variables
+
+        :param typevars_map: Dictionary of type applications for binding subclasses.
+            Given a generic class `Model` with 2 type variables [S, T]
+            and a concrete model `Model[str, int]`,
+            the value `{S: str, T: int}` would be passed to `typevars_map`.
+        :return: an iterator of generic subclasses, parameterised by `typevars_map`
+            and other assigned parameters of `cls`
+
+        e.g.:
+        ```
+        class A(GenericModel, Generic[T]):
+            ...
+
+        class B(A[V], Generic[V]):
+            ...
+
+        assert A[int] in B.__parameterized_bases__({V: int})
+        ```
+        """
+
+        def build_base_model(
+            base_model: Type[GenericModel], mapped_types: Parametrization
+        ) -> Iterator[Type[GenericModel]]:
+            base_parameters = tuple(mapped_types[param] for param in base_model.__parameters__)
+            parameterized_base = base_model.__class_getitem__(base_parameters)
+            if parameterized_base is base_model or parameterized_base is cls:
+                # Avoid duplication in MRO
+                return
+            yield parameterized_base
+
+        for base_model in cls.__bases__:
+            if not issubclass(base_model, GenericModel):
+                # not a class that can be meaningfully parameterized
+                continue
+            elif not getattr(base_model, '__parameters__', None):
+                # base_model is "GenericModel"  (and has no __parameters__)
+                # or
+                # base_model is already concrete, and will be included transitively via cls.
+                continue
+            elif cls in _assigned_parameters:
+                if base_model in _assigned_parameters:
+                    # cls is partially parameterised but not from base_model
+                    # e.g. cls = B[S], base_model = A[S]
+                    # B[S][int] should subclass A[int],  (and will be transitively via B[int])
+                    # but it's not viable to consistently subclass types with arbitrary construction
+                    # So don't attempt to include A[S][int]
+                    continue
+                else:  # base_model not in _assigned_parameters:
+                    # cls is partially parameterized, base_model is original generic
+                    # e.g.  cls = B[str, T], base_model = B[S, T]
+                    # Need to determine the mapping for the base_model parameters
+                    mapped_types: Parametrization = {
+                        key: typevars_map.get(value, value) for key, value in _assigned_parameters[cls].items()
+                    }
+                    yield from build_base_model(base_model, mapped_types)
+            else:
+                # cls is base generic, so base_class has a distinct base
+                # can construct the Parameterised base model using typevars_map directly
+                yield from build_base_model(base_model, typevars_map)
+
+
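+# Illustrative sketch (an assumption, not part of the upstream source): a generic
+# model is declared by inheriting from both GenericModel and typing.Generic, and
+# `__class_getitem__` above builds the concrete, cached subclass, e.g.
+#
+#     DataT = TypeVar('DataT')
+#
+#     class Response(GenericModel, Generic[DataT]):
+#         data: DataT
+#
+#     Response[int](data='1').data == 1   # '1' is coerced by the concrete class
+#
+# `DataT`, `Response` and `data` are hypothetical names.
+
+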
+def replace_types(type_: Any, type_map: Mapping[Any, Any]) -> Any:
+    """Return type with all occurrences of `type_map` keys recursively replaced with their values.
+
+    :param type_: Any type, class or generic alias
+    :param type_map: Mapping from `TypeVar` instance to concrete types.
+    :return: New type representing the basic structure of `type_` with all
+        `typevar_map` keys recursively replaced.
+
+    >>> replace_types(Tuple[str, Union[List[str], float]], {str: int})
+    Tuple[int, Union[List[int], float]]
+
+    """
+    if not type_map:
+        return type_
+
+    type_args = get_args(type_)
+    origin_type = get_origin(type_)
+
+    if origin_type is Annotated:
+        annotated_type, *annotations = type_args
+        return Annotated[replace_types(annotated_type, type_map), tuple(annotations)]
+
+    if (origin_type is ExtLiteral) or (sys.version_info >= (3, 8) and origin_type is Literal):
+        return type_map.get(type_, type_)
+    # Having type args is a good indicator that this is a typing module
+    # class instantiation or a generic alias of some sort.
+    if type_args:
+        resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args)
+        if all_identical(type_args, resolved_type_args):
+            # If all arguments are the same, there is no need to modify the
+            # type or create a new object at all
+            return type_
+        if (
+            origin_type is not None
+            and isinstance(type_, typing_base)
+            and not isinstance(origin_type, typing_base)
+            and getattr(type_, '_name', None) is not None
+        ):
+            # In python < 3.9 generic aliases don't exist so any of these like `list`,
+            # `type` or `collections.abc.Callable` need to be translated.
+            # See: https://www.python.org/dev/peps/pep-0585
+            origin_type = getattr(typing, type_._name)
+        assert origin_type is not None
+        # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__.
+        # We also cannot use isinstance() since we have to compare types.
+        if sys.version_info >= (3, 10) and origin_type is types.UnionType:  # noqa: E721
+            return _UnionGenericAlias(origin_type, resolved_type_args)
+        return origin_type[resolved_type_args]
+
+    # We handle pydantic generic models separately as they don't have the same
+    # semantics as "typing" classes or generic aliases
+    if not origin_type and lenient_issubclass(type_, GenericModel) and not type_.__concrete__:
+        type_args = type_.__parameters__
+        resolved_type_args = tuple(replace_types(t, type_map) for t in type_args)
+        if all_identical(type_args, resolved_type_args):
+            return type_
+        return type_[resolved_type_args]
+
+    # Handle special case for typehints that can have lists as arguments.
+    # `typing.Callable[[int, str], int]` is an example for this.
+    if isinstance(type_, (List, list)):
+        resolved_list = list(replace_types(element, type_map) for element in type_)
+        if all_identical(type_, resolved_list):
+            return type_
+        return resolved_list
+
+    # For JsonWrapperValue, need to handle its inner type to allow correct parsing
+    # of generic Json arguments like Json[T]
+    if not origin_type and lenient_issubclass(type_, JsonWrapper):
+        type_.inner_type = replace_types(type_.inner_type, type_map)
+        return type_
+
+    # If all else fails, we try to resolve the type directly and otherwise just
+    # return the input with no modifications.
+    new_type = type_map.get(type_, type_)
+    # Convert string to ForwardRef
+    if isinstance(new_type, str):
+        return ForwardRef(new_type)
+    else:
+        return new_type
+
+
+def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None:
+    actual = len(parameters)
+    expected = len(cls.__parameters__)
+    if actual != expected:
+        description = 'many' if actual > expected else 'few'
+        raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}')
+
+
+DictValues: Type[Any] = {}.values().__class__
+
+
+def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]:
+    """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found."""
+    if isinstance(v, TypeVar):
+        yield v
+    elif hasattr(v, '__parameters__') and not get_origin(v) and lenient_issubclass(v, GenericModel):
+        yield from v.__parameters__
+    elif isinstance(v, (DictValues, list)):
+        for var in v:
+            yield from iter_contained_typevars(var)
+    else:
+        args = get_args(v)
+        for arg in args:
+            yield from iter_contained_typevars(arg)
+
+
+def get_caller_frame_info() -> Tuple[Optional[str], bool]:
+    """
+    Used inside a function to check whether it was called globally
+
+    Will only work against non-compiled code, therefore used only in pydantic.generics
+
+    :returns Tuple[module_name, called_globally]
+    """
+    try:
+        previous_caller_frame = sys._getframe(2)
+    except ValueError as e:
+        raise RuntimeError('This function must be used inside another function') from e
+    except AttributeError:  # sys module does not have _getframe function, so there's nothing we can do about it
+        return None, False
+    frame_globals = previous_caller_frame.f_globals
+    return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals
+
+
+def _prepare_model_fields(
+    created_model: Type[GenericModel],
+    fields: Mapping[str, Any],
+    instance_type_hints: Mapping[str, type],
+    typevars_map: Mapping[Any, type],
+) -> None:
+    """
+    Replace DeferredType fields with concrete type hints and prepare them.
+    """
+
+    for key, field in created_model.__fields__.items():
+        if key not in fields:
+            assert field.type_.__class__ is not DeferredType
+            # https://github.com/nedbat/coveragepy/issues/198
+            continue  # pragma: no cover
+
+        assert field.type_.__class__ is DeferredType, field.type_.__class__
+
+        field_type_hint = instance_type_hints[key]
+        concrete_type = replace_types(field_type_hint, typevars_map)
+        field.type_ = concrete_type
+        field.outer_type_ = concrete_type
+        field.prepare()
+        created_model.__annotations__[key] = concrete_type
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/json.py b/.venv/lib/python3.12/site-packages/pydantic/v1/json.py
new file mode 100644
index 00000000..41d0d5fc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/json.py
@@ -0,0 +1,112 @@
+import datetime
+from collections import deque
+from decimal import Decimal
+from enum import Enum
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from pathlib import Path
+from re import Pattern
+from types import GeneratorType
+from typing import Any, Callable, Dict, Type, Union
+from uuid import UUID
+
+from pydantic.v1.color import Color
+from pydantic.v1.networks import NameEmail
+from pydantic.v1.types import SecretBytes, SecretStr
+
+__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat'
+
+
+def isoformat(o: Union[datetime.date, datetime.time]) -> str:
+    return o.isoformat()
+
+
+def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
+    """
+    Encodes a Decimal as int if there's no exponent, otherwise float
+
+    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
+    where an integer (but not int typed) is used. Encoding this as a float
+    results in failed round-tripping between encode and parse.
+    Our Id type is a prime example of this.
+
+    >>> decimal_encoder(Decimal("1.0"))
+    1.0
+
+    >>> decimal_encoder(Decimal("1"))
+    1
+    """
+    if dec_value.as_tuple().exponent >= 0:
+        return int(dec_value)
+    else:
+        return float(dec_value)
+
+
+ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
+    bytes: lambda o: o.decode(),
+    Color: str,
+    datetime.date: isoformat,
+    datetime.datetime: isoformat,
+    datetime.time: isoformat,
+    datetime.timedelta: lambda td: td.total_seconds(),
+    Decimal: decimal_encoder,
+    Enum: lambda o: o.value,
+    frozenset: list,
+    deque: list,
+    GeneratorType: list,
+    IPv4Address: str,
+    IPv4Interface: str,
+    IPv4Network: str,
+    IPv6Address: str,
+    IPv6Interface: str,
+    IPv6Network: str,
+    NameEmail: str,
+    Path: str,
+    Pattern: lambda o: o.pattern,
+    SecretBytes: str,
+    SecretStr: str,
+    set: list,
+    UUID: str,
+}
+
+
+def pydantic_encoder(obj: Any) -> Any:
+    from dataclasses import asdict, is_dataclass
+
+    from pydantic.v1.main import BaseModel
+
+    if isinstance(obj, BaseModel):
+        return obj.dict()
+    elif is_dataclass(obj):
+        return asdict(obj)
+
+    # Check the class type and its superclasses for a matching encoder
+    for base in obj.__class__.__mro__[:-1]:
+        try:
+            encoder = ENCODERS_BY_TYPE[base]
+        except KeyError:
+            continue
+        return encoder(obj)
+    else:  # We have exited the for loop without finding a suitable encoder
+        raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable")
+
+
+def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any:
+    # Check the class type and its superclasses for a matching encoder
+    for base in obj.__class__.__mro__[:-1]:
+        try:
+            encoder = type_encoders[base]
+        except KeyError:
+            continue
+
+        return encoder(obj)
+    else:  # We have exited the for loop without finding a suitable encoder
+        return pydantic_encoder(obj)
+
+
+def timedelta_isoformat(td: datetime.timedelta) -> str:
+    """
+    ISO 8601 encoding for Python timedelta object.
+    """
+    minutes, seconds = divmod(td.seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S'
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/main.py b/.venv/lib/python3.12/site-packages/pydantic/v1/main.py
new file mode 100644
index 00000000..68af6f55
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/main.py
@@ -0,0 +1,1107 @@
+import warnings
+from abc import ABCMeta
+from copy import deepcopy
+from enum import Enum
+from functools import partial
+from pathlib import Path
+from types import FunctionType, prepare_class, resolve_bases
+from typing import (
+    TYPE_CHECKING,
+    AbstractSet,
+    Any,
+    Callable,
+    ClassVar,
+    Dict,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    no_type_check,
+    overload,
+)
+
+from typing_extensions import dataclass_transform
+
+from pydantic.v1.class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators
+from pydantic.v1.config import BaseConfig, Extra, inherit_config, prepare_config
+from pydantic.v1.error_wrappers import ErrorWrapper, ValidationError
+from pydantic.v1.errors import ConfigError, DictError, ExtraError, MissingError
+from pydantic.v1.fields import (
+    MAPPING_LIKE_SHAPES,
+    Field,
+    ModelField,
+    ModelPrivateAttr,
+    PrivateAttr,
+    Undefined,
+    is_finalvar_with_default_val,
+)
+from pydantic.v1.json import custom_pydantic_encoder, pydantic_encoder
+from pydantic.v1.parse import Protocol, load_file, load_str_bytes
+from pydantic.v1.schema import default_ref_template, model_schema
+from pydantic.v1.types import PyObject, StrBytes
+from pydantic.v1.typing import (
+    AnyCallable,
+    get_args,
+    get_origin,
+    is_classvar,
+    is_namedtuple,
+    is_union,
+    resolve_annotations,
+    update_model_forward_refs,
+)
+from pydantic.v1.utils import (
+    DUNDER_ATTRIBUTES,
+    ROOT_KEY,
+    ClassAttribute,
+    GetterDict,
+    Representation,
+    ValueItems,
+    generate_model_signature,
+    is_valid_field,
+    is_valid_private_name,
+    lenient_issubclass,
+    sequence_like,
+    smart_deepcopy,
+    unique_list,
+    validate_field_name,
+)
+
+if TYPE_CHECKING:
+    from inspect import Signature
+
+    from pydantic.v1.class_validators import ValidatorListDict
+    from pydantic.v1.types import ModelOrDc
+    from pydantic.v1.typing import (
+        AbstractSetIntStr,
+        AnyClassMethod,
+        CallableGenerator,
+        DictAny,
+        DictStrAny,
+        MappingIntStrAny,
+        ReprArgs,
+        SetStr,
+        TupleGenerator,
+    )
+
+    Model = TypeVar('Model', bound='BaseModel')
+
+__all__ = 'BaseModel', 'create_model', 'validate_model'
+
+_T = TypeVar('_T')
+
+
+def validate_custom_root_type(fields: Dict[str, ModelField]) -> None:
+    if len(fields) > 1:
+        raise ValueError(f'{ROOT_KEY} cannot be mixed with other fields')
+
+
+def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]:
+    def hash_function(self_: Any) -> int:
+        return hash(self_.__class__) + hash(tuple(self_.__dict__.values()))
+
+    return hash_function if frozen else None
+
+
+# If a field is of type `Callable`, its default value should be a function and cannot be ignored.
+ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type, classmethod, staticmethod)
+# When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model
+UNTOUCHED_TYPES: Tuple[Any, ...] = (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES
+# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra
+# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's
+# safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for
+# the `BaseModel` class, since that's defined immediately after the metaclass.
+_is_base_model_class_defined = False
+
+
+@dataclass_transform(kw_only_default=True, field_specifiers=(Field,))
+class ModelMetaclass(ABCMeta):
+    @no_type_check  # noqa C901
+    def __new__(mcs, name, bases, namespace, **kwargs):  # noqa C901
+        fields: Dict[str, ModelField] = {}
+        config = BaseConfig
+        validators: 'ValidatorListDict' = {}
+
+        pre_root_validators, post_root_validators = [], []
+        private_attributes: Dict[str, ModelPrivateAttr] = {}
+        base_private_attributes: Dict[str, ModelPrivateAttr] = {}
+        slots: SetStr = namespace.get('__slots__', ())
+        slots = {slots} if isinstance(slots, str) else set(slots)
+        class_vars: SetStr = set()
+        hash_func: Optional[Callable[[Any], int]] = None
+
+        for base in reversed(bases):
+            if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel:
+                fields.update(smart_deepcopy(base.__fields__))
+                config = inherit_config(base.__config__, config)
+                validators = inherit_validators(base.__validators__, validators)
+                pre_root_validators += base.__pre_root_validators__
+                post_root_validators += base.__post_root_validators__
+                base_private_attributes.update(base.__private_attributes__)
+                class_vars.update(base.__class_vars__)
+                hash_func = base.__hash__
+
+        resolve_forward_refs = kwargs.pop('__resolve_forward_refs__', True)
+        allowed_config_kwargs: SetStr = {
+            key
+            for key in dir(config)
+            if not (key.startswith('__') and key.endswith('__'))  # skip dunder methods and attributes
+        }
+        config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs}
+        config_from_namespace = namespace.get('Config')
+        if config_kwargs and config_from_namespace:
+            raise TypeError('Specifying config in two places is ambiguous, use either Config attribute or class kwargs')
+        config = inherit_config(config_from_namespace, config, **config_kwargs)
+
+        validators = inherit_validators(extract_validators(namespace), validators)
+        vg = ValidatorGroup(validators)
+
+        for f in fields.values():
+            f.set_config(config)
+            extra_validators = vg.get_validators(f.name)
+            if extra_validators:
+                f.class_validators.update(extra_validators)
+                # re-run prepare to add extra validators
+                f.populate_validators()
+
+        prepare_config(config, name)
+
+        untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES
+
+        def is_untouched(v: Any) -> bool:
+            return isinstance(v, untouched_types) or v.__class__.__name__ == 'cython_function_or_method'
+
+        if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'):
+            annotations = resolve_annotations(namespace.get('__annotations__', {}), namespace.get('__module__', None))
+            # annotation only fields need to come first in fields
+            for ann_name, ann_type in annotations.items():
+                if is_classvar(ann_type):
+                    class_vars.add(ann_name)
+                elif is_finalvar_with_default_val(ann_type, namespace.get(ann_name, Undefined)):
+                    class_vars.add(ann_name)
+                elif is_valid_field(ann_name):
+                    validate_field_name(bases, ann_name)
+                    value = namespace.get(ann_name, Undefined)
+                    allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,)
+                    if (
+                        is_untouched(value)
+                        and ann_type != PyObject
+                        and not any(
+                            lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types
+                        )
+                    ):
+                        continue
+                    fields[ann_name] = ModelField.infer(
+                        name=ann_name,
+                        value=value,
+                        annotation=ann_type,
+                        class_validators=vg.get_validators(ann_name),
+                        config=config,
+                    )
+                elif ann_name not in namespace and config.underscore_attrs_are_private:
+                    private_attributes[ann_name] = PrivateAttr()
+
+            untouched_types = UNTOUCHED_TYPES + config.keep_untouched
+            for var_name, value in namespace.items():
+                can_be_changed = var_name not in class_vars and not is_untouched(value)
+                if isinstance(value, ModelPrivateAttr):
+                    if not is_valid_private_name(var_name):
+                        raise NameError(
+                            f'Private attributes "{var_name}" must not be a valid field name; '
+                            f'use sunder or dunder names, e.g. "_{var_name}" or "__{var_name}__"'
+                        )
+                    private_attributes[var_name] = value
+                elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed:
+                    private_attributes[var_name] = PrivateAttr(default=value)
+                elif is_valid_field(var_name) and var_name not in annotations and can_be_changed:
+                    validate_field_name(bases, var_name)
+                    inferred = ModelField.infer(
+                        name=var_name,
+                        value=value,
+                        annotation=annotations.get(var_name, Undefined),
+                        class_validators=vg.get_validators(var_name),
+                        config=config,
+                    )
+                    if var_name in fields:
+                        if lenient_issubclass(inferred.type_, fields[var_name].type_):
+                            inferred.type_ = fields[var_name].type_
+                        else:
+                            raise TypeError(
+                                f'The type of {name}.{var_name} differs from the new default value; '
+                                f'if you wish to change the type of this field, please use a type annotation'
+                            )
+                    fields[var_name] = inferred
+
+        _custom_root_type = ROOT_KEY in fields
+        if _custom_root_type:
+            validate_custom_root_type(fields)
+        vg.check_for_unused()
+        if config.json_encoders:
+            json_encoder = partial(custom_pydantic_encoder, config.json_encoders)
+        else:
+            json_encoder = pydantic_encoder
+        pre_rv_new, post_rv_new = extract_root_validators(namespace)
+
+        if hash_func is None:
+            hash_func = generate_hash_function(config.frozen)
+
+        exclude_from_namespace = fields | private_attributes.keys() | {'__slots__'}
+        new_namespace = {
+            '__config__': config,
+            '__fields__': fields,
+            '__exclude_fields__': {
+                name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None
+            }
+            or None,
+            '__include_fields__': {
+                name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None
+            }
+            or None,
+            '__validators__': vg.validators,
+            '__pre_root_validators__': unique_list(
+                pre_root_validators + pre_rv_new,
+                name_factory=lambda v: v.__name__,
+            ),
+            '__post_root_validators__': unique_list(
+                post_root_validators + post_rv_new,
+                name_factory=lambda skip_on_failure_and_v: skip_on_failure_and_v[1].__name__,
+            ),
+            '__schema_cache__': {},
+            '__json_encoder__': staticmethod(json_encoder),
+            '__custom_root_type__': _custom_root_type,
+            '__private_attributes__': {**base_private_attributes, **private_attributes},
+            '__slots__': slots | private_attributes.keys(),
+            '__hash__': hash_func,
+            '__class_vars__': class_vars,
+            **{n: v for n, v in namespace.items() if n not in exclude_from_namespace},
+        }
+
+        cls = super().__new__(mcs, name, bases, new_namespace, **kwargs)
+        # set __signature__ attr only for model class, but not for its instances
+        cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config))
+        if resolve_forward_refs:
+            cls.__try_update_forward_refs__()
+
+        # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487
+        # for attributes not in `new_namespace` (e.g. private attributes)
+        for name, obj in namespace.items():
+            if name not in new_namespace:
+                set_name = getattr(obj, '__set_name__', None)
+                if callable(set_name):
+                    set_name(cls, name)
+
+        return cls
+
+    def __instancecheck__(self, instance: Any) -> bool:
+        """
+        Avoid calling ABC _abc_subclasscheck unless we're pretty sure.
+
+        See #3829 and python/cpython#92810
+        """
+        return hasattr(instance, '__post_root_validators__') and super().__instancecheck__(instance)
+
+
+object_setattr = object.__setattr__
+
+
+class BaseModel(Representation, metaclass=ModelMetaclass):
+    if TYPE_CHECKING:
+        # populated by the metaclass, defined here to help IDEs only
+        __fields__: ClassVar[Dict[str, ModelField]] = {}
+        __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None
+        __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None
+        __validators__: ClassVar[Dict[str, AnyCallable]] = {}
+        __pre_root_validators__: ClassVar[List[AnyCallable]]
+        __post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]]
+        __config__: ClassVar[Type[BaseConfig]] = BaseConfig
+        __json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x
+        __schema_cache__: ClassVar['DictAny'] = {}
+        __custom_root_type__: ClassVar[bool] = False
+        __signature__: ClassVar['Signature']
+        __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]]
+        __class_vars__: ClassVar[SetStr]
+        __fields_set__: ClassVar[SetStr] = set()
+
+    Config = BaseConfig
+    __slots__ = ('__dict__', '__fields_set__')
+    __doc__ = ''  # Null out the Representation docstring
+
+    def __init__(__pydantic_self__, **data: Any) -> None:
+        """
+        Create a new model by parsing and validating input data from keyword arguments.
+
+        Raises ValidationError if the input data cannot be parsed to form a valid model.
+        """
+        # Uses something other than `self` as the first arg to allow "self" as a settable attribute
+        values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data)
+        if validation_error:
+            raise validation_error
+        try:
+            object_setattr(__pydantic_self__, '__dict__', values)
+        except TypeError as e:
+            raise TypeError(
+                'Model values must be a dict; you may not have returned a dictionary from a root validator'
+            ) from e
+        object_setattr(__pydantic_self__, '__fields_set__', fields_set)
+        __pydantic_self__._init_private_attributes()
+
+    @no_type_check
+    def __setattr__(self, name, value):  # noqa: C901 (ignore complexity)
+        if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES:
+            return object_setattr(self, name, value)
+
+        if self.__config__.extra is not Extra.allow and name not in self.__fields__:
+            raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"')
+        elif not self.__config__.allow_mutation or self.__config__.frozen:
+            raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment')
+        elif name in self.__fields__ and self.__fields__[name].final:
+            raise TypeError(
+                f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment'
+            )
+        elif self.__config__.validate_assignment:
+            new_values = {**self.__dict__, name: value}
+
+            for validator in self.__pre_root_validators__:
+                try:
+                    new_values = validator(self.__class__, new_values)
+                except (ValueError, TypeError, AssertionError) as exc:
+                    raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__)
+
+            known_field = self.__fields__.get(name, None)
+            if known_field:
+                # We want to
+                # - make sure validators are called without the current value for this field inside `values`
+                # - keep other values (e.g. submodels) untouched (using `BaseModel.dict()` will change them into dicts)
+                # - keep the order of the fields
+                if not known_field.field_info.allow_mutation:
+                    raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned')
+                dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name}
+                value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__)
+                if error_:
+                    raise ValidationError([error_], self.__class__)
+                else:
+                    new_values[name] = value
+
+            errors = []
+            for skip_on_failure, validator in self.__post_root_validators__:
+                if skip_on_failure and errors:
+                    continue
+                try:
+                    new_values = validator(self.__class__, new_values)
+                except (ValueError, TypeError, AssertionError) as exc:
+                    errors.append(ErrorWrapper(exc, loc=ROOT_KEY))
+            if errors:
+                raise ValidationError(errors, self.__class__)
+
+            # update the whole __dict__, as values other than just `value`
+            # may have been changed (e.g. by a `root_validator`)
+            object_setattr(self, '__dict__', new_values)
+        else:
+            self.__dict__[name] = value
+
+        self.__fields_set__.add(name)
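+
+    # Illustrative sketch of the assignment path above (hypothetical model, not part of
+    # this module): with `Config.validate_assignment = True`, assignment re-runs the
+    # field's validators and every root validator before `__dict__` is replaced, so an
+    # invalid assignment raises `ValidationError` and leaves the instance unchanged.
+    #
+    #     class Point(BaseModel):
+    #         x: int
+    #         y: int = 0
+    #
+    #         class Config:
+    #             validate_assignment = True
+    #
+    #     p = Point(x=1)
+    #     p.x = '2'      # coerced to int 2 by the field validator
+    #     p.x = 'oops'   # raises ValidationError; p.x is still 2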
+
+    def __getstate__(self) -> 'DictAny':
+        private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__)
+        return {
+            '__dict__': self.__dict__,
+            '__fields_set__': self.__fields_set__,
+            '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined},
+        }
+
+    def __setstate__(self, state: 'DictAny') -> None:
+        object_setattr(self, '__dict__', state['__dict__'])
+        object_setattr(self, '__fields_set__', state['__fields_set__'])
+        for name, value in state.get('__private_attribute_values__', {}).items():
+            object_setattr(self, name, value)
+
+    def _init_private_attributes(self) -> None:
+        for name, private_attr in self.__private_attributes__.items():
+            default = private_attr.get_default()
+            if default is not Undefined:
+                object_setattr(self, name, default)
+
+    def dict(
+        self,
+        *,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        by_alias: bool = False,
+        skip_defaults: Optional[bool] = None,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+    ) -> 'DictStrAny':
+        """
+        Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+        """
+        if skip_defaults is not None:
+            warnings.warn(
+                f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
+                DeprecationWarning,
+            )
+            exclude_unset = skip_defaults
+
+        return dict(
+            self._iter(
+                to_dict=True,
+                by_alias=by_alias,
+                include=include,
+                exclude=exclude,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+            )
+        )
+
+    def json(
+        self,
+        *,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        by_alias: bool = False,
+        skip_defaults: Optional[bool] = None,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        encoder: Optional[Callable[[Any], Any]] = None,
+        models_as_dict: bool = True,
+        **dumps_kwargs: Any,
+    ) -> str:
+        """
+        Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.
+
+        `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
+        """
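+        # Illustrative sketch (hypothetical model, assumes `import datetime`): extra
+        # keyword arguments go straight to `json.dumps`, and `encoder` replaces the
+        # model's `__json_encoder__` as the `default` hook for objects `json.dumps`
+        # cannot serialize natively.
+        #
+        #     class Event(BaseModel):
+        #         when: datetime.datetime
+        #         name: str
+        #
+        #     e = Event(when=datetime.datetime(2020, 1, 1), name='launch')
+        #     e.json(indent=2)     # pretty-printed via json.dumps(..., indent=2)
+        #     e.json(encoder=str)  # datetimes fall back to str()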
+        if skip_defaults is not None:
+            warnings.warn(
+                f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
+                DeprecationWarning,
+            )
+            exclude_unset = skip_defaults
+        encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__)
+
+        # We don't directly call `self.dict()`, which does exactly this with `to_dict=True`,
+        # because we want to keep raw `BaseModel` instances rather than converting them to `dict`.
+        # This allows users to write custom JSON encoders for given `BaseModel` classes.
+        data = dict(
+            self._iter(
+                to_dict=models_as_dict,
+                by_alias=by_alias,
+                include=include,
+                exclude=exclude,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+            )
+        )
+        if self.__custom_root_type__:
+            data = data[ROOT_KEY]
+        return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs)
+
+    @classmethod
+    def _enforce_dict_if_root(cls, obj: Any) -> Any:
+        if cls.__custom_root_type__ and (
+            not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY})
+            and not (isinstance(obj, BaseModel) and obj.__fields__.keys() == {ROOT_KEY})
+            or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES
+        ):
+            return {ROOT_KEY: obj}
+        else:
+            return obj
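+
+    # Illustrative sketch (hypothetical model): for a custom-root model, any input that
+    # is not already `{'__root__': ...}` is wrapped by `_enforce_dict_if_root`, so
+    # `parse_obj` and `validate` accept the bare payload directly.
+    #
+    #     class Tags(BaseModel):
+    #         __root__: List[str]
+    #
+    #     Tags.parse_obj(['a', 'b']).__root__    # ['a', 'b']
+    #     Tags.parse_obj({'__root__': ['a']})    # also accepted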
+
+    @classmethod
+    def parse_obj(cls: Type['Model'], obj: Any) -> 'Model':
+        obj = cls._enforce_dict_if_root(obj)
+        if not isinstance(obj, dict):
+            try:
+                obj = dict(obj)
+            except (TypeError, ValueError) as e:
+                exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}')
+                raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e
+        return cls(**obj)
+
+    @classmethod
+    def parse_raw(
+        cls: Type['Model'],
+        b: StrBytes,
+        *,
+        content_type: str = None,
+        encoding: str = 'utf8',
+        proto: Protocol = None,
+        allow_pickle: bool = False,
+    ) -> 'Model':
+        try:
+            obj = load_str_bytes(
+                b,
+                proto=proto,
+                content_type=content_type,
+                encoding=encoding,
+                allow_pickle=allow_pickle,
+                json_loads=cls.__config__.json_loads,
+            )
+        except (ValueError, TypeError, UnicodeDecodeError) as e:
+            raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls)
+        return cls.parse_obj(obj)
+
+    @classmethod
+    def parse_file(
+        cls: Type['Model'],
+        path: Union[str, Path],
+        *,
+        content_type: str = None,
+        encoding: str = 'utf8',
+        proto: Protocol = None,
+        allow_pickle: bool = False,
+    ) -> 'Model':
+        obj = load_file(
+            path,
+            proto=proto,
+            content_type=content_type,
+            encoding=encoding,
+            allow_pickle=allow_pickle,
+            json_loads=cls.__config__.json_loads,
+        )
+        return cls.parse_obj(obj)
+
+    @classmethod
+    def from_orm(cls: Type['Model'], obj: Any) -> 'Model':
+        if not cls.__config__.orm_mode:
+            raise ConfigError('You must have the config attribute orm_mode=True to use from_orm')
+        obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj)
+        m = cls.__new__(cls)
+        values, fields_set, validation_error = validate_model(cls, obj)
+        if validation_error:
+            raise validation_error
+        object_setattr(m, '__dict__', values)
+        object_setattr(m, '__fields_set__', fields_set)
+        m._init_private_attributes()
+        return m
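+
+    # Illustrative sketch (hypothetical ORM-style object): `from_orm` requires
+    # `Config.orm_mode = True` and reads attributes through the configured
+    # `getter_dict` instead of mapping keys, bypassing `__init__`.
+    #
+    #     class UserRow:
+    #         id = 1
+    #         name = 'alice'
+    #
+    #     class User(BaseModel):
+    #         id: int
+    #         name: str
+    #
+    #         class Config:
+    #             orm_mode = True
+    #
+    #     User.from_orm(UserRow())   # User(id=1, name='alice')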
+
+    @classmethod
+    def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model':
+        """
+        Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data.
+        Default values are respected, but no other validation is performed.
+        Behaves as if `Config.extra = 'allow'` was set since it adds all passed values
+        """
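+        # Illustrative sketch (hypothetical model): `construct` skips validation entirely,
+        # so it is fast but will happily store bad data.
+        #
+        #     class User(BaseModel):
+        #         id: int
+        #         name: str = 'anonymous'
+        #
+        #     u = User.construct(id='not-an-int')   # no error, id stays a str
+        #     u.name                                # 'anonymous' (default applied)
+        #     u.__fields_set__                      # {'id'}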
+        m = cls.__new__(cls)
+        fields_values: Dict[str, Any] = {}
+        for name, field in cls.__fields__.items():
+            if field.alt_alias and field.alias in values:
+                fields_values[name] = values[field.alias]
+            elif name in values:
+                fields_values[name] = values[name]
+            elif not field.required:
+                fields_values[name] = field.get_default()
+        fields_values.update(values)
+        object_setattr(m, '__dict__', fields_values)
+        if _fields_set is None:
+            _fields_set = set(values.keys())
+        object_setattr(m, '__fields_set__', _fields_set)
+        m._init_private_attributes()
+        return m
+
+    def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model':
+        if deep:
+            # the chances of an empty dict here are quite low, so it's not worth using smart_deepcopy
+            values = deepcopy(values)
+
+        cls = self.__class__
+        m = cls.__new__(cls)
+        object_setattr(m, '__dict__', values)
+        object_setattr(m, '__fields_set__', fields_set)
+        for name in self.__private_attributes__:
+            value = getattr(self, name, Undefined)
+            if value is not Undefined:
+                if deep:
+                    value = deepcopy(value)
+                object_setattr(m, name, value)
+
+        return m
+
+    def copy(
+        self: 'Model',
+        *,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        update: Optional['DictStrAny'] = None,
+        deep: bool = False,
+    ) -> 'Model':
+        """
+        Duplicate a model, optionally choose which fields to include, exclude and change.
+
+        :param include: fields to include in new model
+        :param exclude: fields to exclude from new model, as with values this takes precedence over include
+        :param update: values to change/add in the new model. Note: the data is not validated before creating
+            the new model: you should trust this data
+        :param deep: set to `True` to make a deep copy of the model
+        :return: new model instance
+        """
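+        # Illustrative sketch (hypothetical model): `update` values are merged in without
+        # validation, and `deep=True` deep-copies nested values.
+        #
+        #     class User(BaseModel):
+        #         id: int
+        #         tags: List[str] = []
+        #
+        #     u = User(id=1)
+        #     v = u.copy(update={'id': 2}, deep=True)
+        #     v.id              # 2
+        #     v.tags is u.tags  # False, the list was deep-copied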
+
+        values = dict(
+            self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False),
+            **(update or {}),
+        )
+
+        # the new `__fields_set__` can include previously-unset optional fields that were given a value via the `update` kwarg
+        if update:
+            fields_set = self.__fields_set__ | update.keys()
+        else:
+            fields_set = set(self.__fields_set__)
+
+        return self._copy_and_set_values(values, fields_set, deep=deep)
+
+    @classmethod
+    def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny':
+        cached = cls.__schema_cache__.get((by_alias, ref_template))
+        if cached is not None:
+            return cached
+        s = model_schema(cls, by_alias=by_alias, ref_template=ref_template)
+        cls.__schema_cache__[(by_alias, ref_template)] = s
+        return s
+
+    @classmethod
+    def schema_json(
+        cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any
+    ) -> str:
+        from pydantic.v1.json import pydantic_encoder
+
+        return cls.__config__.json_dumps(
+            cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.validate
+
+    @classmethod
+    def validate(cls: Type['Model'], value: Any) -> 'Model':
+        if isinstance(value, cls):
+            copy_on_model_validation = cls.__config__.copy_on_model_validation
+            # whether to deep or shallow copy the model on validation, None means do not copy
+            deep_copy: Optional[bool] = None
+            if copy_on_model_validation not in {'deep', 'shallow', 'none'}:
+                # Warn about deprecated behavior
+                warnings.warn(
+                    "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning
+                )
+                if copy_on_model_validation:
+                    deep_copy = False
+
+            if copy_on_model_validation == 'shallow':
+                # shallow copy
+                deep_copy = False
+            elif copy_on_model_validation == 'deep':
+                # deep copy
+                deep_copy = True
+
+            if deep_copy is None:
+                return value
+            else:
+                return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy)
+
+        value = cls._enforce_dict_if_root(value)
+
+        if isinstance(value, dict):
+            return cls(**value)
+        elif cls.__config__.orm_mode:
+            return cls.from_orm(value)
+        else:
+            try:
+                value_as_dict = dict(value)
+            except (TypeError, ValueError) as e:
+                raise DictError() from e
+            return cls(**value_as_dict)
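+
+    # Illustrative sketch (hypothetical models): when a submodel field receives an
+    # instance of the expected class, `Config.copy_on_model_validation` controls whether
+    # the parent keeps the same object ('none'), a shallow copy ('shallow', the default)
+    # or a deep copy ('deep').
+    #
+    #     class Child(BaseModel):
+    #         x: int
+    #
+    #         class Config:
+    #             copy_on_model_validation = 'none'
+    #
+    #     class Parent(BaseModel):
+    #         child: Child
+    #
+    #     c = Child(x=1)
+    #     Parent(child=c).child is c   # True only because of 'none'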
+
+    @classmethod
+    def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict:
+        if isinstance(obj, GetterDict):
+            return obj
+        return cls.__config__.getter_dict(obj)
+
+    @classmethod
+    @no_type_check
+    def _get_value(
+        cls,
+        v: Any,
+        to_dict: bool,
+        by_alias: bool,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']],
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']],
+        exclude_unset: bool,
+        exclude_defaults: bool,
+        exclude_none: bool,
+    ) -> Any:
+        if isinstance(v, BaseModel):
+            if to_dict:
+                v_dict = v.dict(
+                    by_alias=by_alias,
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    include=include,
+                    exclude=exclude,
+                    exclude_none=exclude_none,
+                )
+                if ROOT_KEY in v_dict:
+                    return v_dict[ROOT_KEY]
+                return v_dict
+            else:
+                return v.copy(include=include, exclude=exclude)
+
+        value_exclude = ValueItems(v, exclude) if exclude else None
+        value_include = ValueItems(v, include) if include else None
+
+        if isinstance(v, dict):
+            return {
+                k_: cls._get_value(
+                    v_,
+                    to_dict=to_dict,
+                    by_alias=by_alias,
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    include=value_include and value_include.for_element(k_),
+                    exclude=value_exclude and value_exclude.for_element(k_),
+                    exclude_none=exclude_none,
+                )
+                for k_, v_ in v.items()
+                if (not value_exclude or not value_exclude.is_excluded(k_))
+                and (not value_include or value_include.is_included(k_))
+            }
+
+        elif sequence_like(v):
+            seq_args = (
+                cls._get_value(
+                    v_,
+                    to_dict=to_dict,
+                    by_alias=by_alias,
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    include=value_include and value_include.for_element(i),
+                    exclude=value_exclude and value_exclude.for_element(i),
+                    exclude_none=exclude_none,
+                )
+                for i, v_ in enumerate(v)
+                if (not value_exclude or not value_exclude.is_excluded(i))
+                and (not value_include or value_include.is_included(i))
+            )
+
+            return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args)
+
+        elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False):
+            return v.value
+
+        else:
+            return v
+
+    @classmethod
+    def __try_update_forward_refs__(cls, **localns: Any) -> None:
+        """
+        Same as update_forward_refs but will not raise an exception
+        when forward references are not defined.
+        """
+        update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,))
+
+    @classmethod
+    def update_forward_refs(cls, **localns: Any) -> None:
+        """
+        Try to update ForwardRefs on fields based on this Model, globalns and localns.
+        """
+        update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns)
+
+    def __iter__(self) -> 'TupleGenerator':
+        """
+        so `dict(model)` works
+        """
+        yield from self.__dict__.items()
+
+    def _iter(
+        self,
+        to_dict: bool = False,
+        by_alias: bool = False,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+    ) -> 'TupleGenerator':
+        # Merge field-level excludes with the explicit exclude parameter, with the explicit parameter overriding the field-level options.
+        # The extra "is not None" guards are not logically necessary but optimize performance for the simple case.
+        if exclude is not None or self.__exclude_fields__ is not None:
+            exclude = ValueItems.merge(self.__exclude_fields__, exclude)
+
+        if include is not None or self.__include_fields__ is not None:
+            include = ValueItems.merge(self.__include_fields__, include, intersect=True)
+
+        allowed_keys = self._calculate_keys(
+            include=include, exclude=exclude, exclude_unset=exclude_unset  # type: ignore
+        )
+        if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none):
+            # huge boost for plain _iter()
+            yield from self.__dict__.items()
+            return
+
+        value_exclude = ValueItems(self, exclude) if exclude is not None else None
+        value_include = ValueItems(self, include) if include is not None else None
+
+        for field_key, v in self.__dict__.items():
+            if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None):
+                continue
+
+            if exclude_defaults:
+                model_field = self.__fields__.get(field_key)
+                if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v:
+                    continue
+
+            if by_alias and field_key in self.__fields__:
+                dict_key = self.__fields__[field_key].alias
+            else:
+                dict_key = field_key
+
+            if to_dict or value_include or value_exclude:
+                v = self._get_value(
+                    v,
+                    to_dict=to_dict,
+                    by_alias=by_alias,
+                    include=value_include and value_include.for_element(field_key),
+                    exclude=value_exclude and value_exclude.for_element(field_key),
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    exclude_none=exclude_none,
+                )
+            yield dict_key, v
+
+    def _calculate_keys(
+        self,
+        include: Optional['MappingIntStrAny'],
+        exclude: Optional['MappingIntStrAny'],
+        exclude_unset: bool,
+        update: Optional['DictStrAny'] = None,
+    ) -> Optional[AbstractSet[str]]:
+        if include is None and exclude is None and exclude_unset is False:
+            return None
+
+        keys: AbstractSet[str]
+        if exclude_unset:
+            keys = self.__fields_set__.copy()
+        else:
+            keys = self.__dict__.keys()
+
+        if include is not None:
+            keys &= include.keys()
+
+        if update:
+            keys -= update.keys()
+
+        if exclude:
+            keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)}
+
+        return keys
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, BaseModel):
+            return self.dict() == other.dict()
+        else:
+            return self.dict() == other
+
+    def __repr_args__(self) -> 'ReprArgs':
+        return [
+            (k, v)
+            for k, v in self.__dict__.items()
+            if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr)
+        ]
+
+
+_is_base_model_class_defined = True
+
+
+@overload
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: None = None,
+    __module__: str = __name__,
+    __validators__: Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    **field_definitions: Any,
+) -> Type['BaseModel']:
+    ...
+
+
+@overload
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: Union[Type['Model'], Tuple[Type['Model'], ...]],
+    __module__: str = __name__,
+    __validators__: Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    **field_definitions: Any,
+) -> Type['Model']:
+    ...
+
+
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]] = None,
+    __module__: str = __name__,
+    __validators__: Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    __slots__: Optional[Tuple[str, ...]] = None,
+    **field_definitions: Any,
+) -> Type['Model']:
+    """
+    Dynamically create a model.
+    :param __model_name: name of the created model
+    :param __config__: config class to use for the new model
+    :param __base__: base class for the new model to inherit from
+    :param __module__: module of the created model
+    :param __validators__: a dict of method names and @validator class methods
+    :param __cls_kwargs__: a dict for class creation
+    :param __slots__: Deprecated, `__slots__` should not be passed to `create_model`
+    :param field_definitions: fields of the model (or extra fields if a base is supplied)
+        in the format `<name>=(<type>, <default value>)` or `<name>=<default value>`, e.g.
+        `foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format
+        `<name>=<Field>` or `<name>=(<type>, <FieldInfo>)`, e.g.
+        `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or
+        `foo=(str, FieldInfo(title='Foo'))`
+        `foo=(str, FieldInfo(title='Foo'))`
+    """
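+    # Illustrative sketch (hypothetical fields): both the `(type, default)` tuple form
+    # and the bare-default form are accepted.
+    #
+    #     DynamicUser = create_model(
+    #         'DynamicUser',
+    #         id=(int, ...),               # required field
+    #         name=(str, 'anonymous'),     # optional with a default
+    #         active=True,                 # type inferred from the default (bool)
+    #     )
+    #     DynamicUser(id=3).dict()   # {'id': 3, 'name': 'anonymous', 'active': True}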
+    if __slots__ is not None:
+        # __slots__ will be ignored from here on
+        warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning)
+
+    if __base__ is not None:
+        if __config__ is not None:
+            raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together')
+        if not isinstance(__base__, tuple):
+            __base__ = (__base__,)
+    else:
+        __base__ = (cast(Type['Model'], BaseModel),)
+
+    __cls_kwargs__ = __cls_kwargs__ or {}
+
+    fields = {}
+    annotations = {}
+
+    for f_name, f_def in field_definitions.items():
+        if not is_valid_field(f_name):
+            warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
+        if isinstance(f_def, tuple):
+            try:
+                f_annotation, f_value = f_def
+            except ValueError as e:
+                raise ConfigError(
+                    'field definitions should either be a tuple of (<type>, <default>) or just a '
+                    'default value; unfortunately this means tuples as '
+                    'default values are not allowed'
+                ) from e
+        else:
+            f_annotation, f_value = None, f_def
+
+        if f_annotation:
+            annotations[f_name] = f_annotation
+        fields[f_name] = f_value
+
+    namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__}
+    if __validators__:
+        namespace.update(__validators__)
+    namespace.update(fields)
+    if __config__:
+        namespace['Config'] = inherit_config(__config__, BaseConfig)
+    resolved_bases = resolve_bases(__base__)
+    meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__)
+    if resolved_bases is not __base__:
+        ns['__orig_bases__'] = __base__
+    namespace.update(ns)
+    return meta(__model_name, resolved_bases, namespace, **kwds)
+
+
+_missing = object()
+
+
+def validate_model(  # noqa: C901 (ignore complexity)
+    model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None
+) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]:
+    """
+    Validate data against a model.
+    """
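+    # Illustrative sketch (hypothetical model): unlike `Model(**data)`, this helper does
+    # not raise; it returns the validated values, the set of explicitly provided fields,
+    # and the ValidationError (or None).
+    #
+    #     class User(BaseModel):
+    #         id: int
+    #         name: str = 'anonymous'
+    #
+    #     values, fields_set, error = validate_model(User, {'id': 'bad'})
+    #     fields_set            # {'id'}
+    #     error is not None     # True: 'bad' is not a valid integer
+    #     values                # {'name': 'anonymous'}; 'id' omitted because it failed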
+    values = {}
+    errors = []
+    # input_data names, possibly alias
+    names_used = set()
+    # field names, never aliases
+    fields_set = set()
+    config = model.__config__
+    check_extra = config.extra is not Extra.ignore
+    cls_ = cls or model
+
+    for validator in model.__pre_root_validators__:
+        try:
+            input_data = validator(cls_, input_data)
+        except (ValueError, TypeError, AssertionError) as exc:
+            return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_)
+
+    for name, field in model.__fields__.items():
+        value = input_data.get(field.alias, _missing)
+        using_name = False
+        if value is _missing and config.allow_population_by_field_name and field.alt_alias:
+            value = input_data.get(field.name, _missing)
+            using_name = True
+
+        if value is _missing:
+            if field.required:
+                errors.append(ErrorWrapper(MissingError(), loc=field.alias))
+                continue
+
+            value = field.get_default()
+
+            if not config.validate_all and not field.validate_always:
+                values[name] = value
+                continue
+        else:
+            fields_set.add(name)
+            if check_extra:
+                names_used.add(field.name if using_name else field.alias)
+
+        v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_)
+        if isinstance(errors_, ErrorWrapper):
+            errors.append(errors_)
+        elif isinstance(errors_, list):
+            errors.extend(errors_)
+        else:
+            values[name] = v_
+
+    if check_extra:
+        if isinstance(input_data, GetterDict):
+            extra = input_data.extra_keys() - names_used
+        else:
+            extra = input_data.keys() - names_used
+        if extra:
+            fields_set |= extra
+            if config.extra is Extra.allow:
+                for f in extra:
+                    values[f] = input_data[f]
+            else:
+                for f in sorted(extra):
+                    errors.append(ErrorWrapper(ExtraError(), loc=f))
+
+    for skip_on_failure, validator in model.__post_root_validators__:
+        if skip_on_failure and errors:
+            continue
+        try:
+            values = validator(cls_, values)
+        except (ValueError, TypeError, AssertionError) as exc:
+            errors.append(ErrorWrapper(exc, loc=ROOT_KEY))
+
+    if errors:
+        return values, fields_set, ValidationError(errors, cls_)
+    else:
+        return values, fields_set, None
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/mypy.py b/.venv/lib/python3.12/site-packages/pydantic/v1/mypy.py
new file mode 100644
index 00000000..f4e27ab4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/mypy.py
@@ -0,0 +1,949 @@
+import sys
+from configparser import ConfigParser
+from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType, Union
+
+from mypy.errorcodes import ErrorCode
+from mypy.nodes import (
+    ARG_NAMED,
+    ARG_NAMED_OPT,
+    ARG_OPT,
+    ARG_POS,
+    ARG_STAR2,
+    MDEF,
+    Argument,
+    AssignmentStmt,
+    Block,
+    CallExpr,
+    ClassDef,
+    Context,
+    Decorator,
+    EllipsisExpr,
+    FuncBase,
+    FuncDef,
+    JsonDict,
+    MemberExpr,
+    NameExpr,
+    PassStmt,
+    PlaceholderNode,
+    RefExpr,
+    StrExpr,
+    SymbolNode,
+    SymbolTableNode,
+    TempNode,
+    TypeInfo,
+    TypeVarExpr,
+    Var,
+)
+from mypy.options import Options
+from mypy.plugin import (
+    CheckerPluginInterface,
+    ClassDefContext,
+    FunctionContext,
+    MethodContext,
+    Plugin,
+    ReportConfigContext,
+    SemanticAnalyzerPluginInterface,
+)
+from mypy.plugins import dataclasses
+from mypy.semanal import set_callable_name  # type: ignore
+from mypy.server.trigger import make_wildcard_trigger
+from mypy.types import (
+    AnyType,
+    CallableType,
+    Instance,
+    NoneType,
+    Overloaded,
+    ProperType,
+    Type,
+    TypeOfAny,
+    TypeType,
+    TypeVarId,
+    TypeVarType,
+    UnionType,
+    get_proper_type,
+)
+from mypy.typevars import fill_typevars
+from mypy.util import get_unique_redefinition_name
+from mypy.version import __version__ as mypy_version
+
+from pydantic.v1.utils import is_valid_field
+
+try:
+    from mypy.types import TypeVarDef  # type: ignore[attr-defined]
+except ImportError:  # pragma: no cover
+    # Backward-compatible with TypeVarDef from Mypy 0.910.
+    from mypy.types import TypeVarType as TypeVarDef
+
+CONFIGFILE_KEY = 'pydantic-mypy'
+METADATA_KEY = 'pydantic-mypy-metadata'
+_NAMESPACE = __name__[:-5]  # 'pydantic' in 1.10.X, 'pydantic.v1' in v2.X
+BASEMODEL_FULLNAME = f'{_NAMESPACE}.main.BaseModel'
+BASESETTINGS_FULLNAME = f'{_NAMESPACE}.env_settings.BaseSettings'
+MODEL_METACLASS_FULLNAME = f'{_NAMESPACE}.main.ModelMetaclass'
+FIELD_FULLNAME = f'{_NAMESPACE}.fields.Field'
+DATACLASS_FULLNAME = f'{_NAMESPACE}.dataclasses.dataclass'
+
+
+def parse_mypy_version(version: str) -> Tuple[int, ...]:
+    return tuple(map(int, version.partition('+')[0].split('.')))
+
+
+MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version)
+BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__'
+
+# Increment version if plugin changes and mypy caches should be invalidated
+__version__ = 2
+
+
+def plugin(version: str) -> 'TypingType[Plugin]':
+    """
+    `version` is the mypy version string
+
+    We might want to use this to print a warning if the mypy version being used is
+    newer, or especially older, than we expect (or need).
+    """
+    return PydanticPlugin
+
+
+class PydanticPlugin(Plugin):
+    def __init__(self, options: Options) -> None:
+        self.plugin_config = PydanticPluginConfig(options)
+        self._plugin_data = self.plugin_config.to_data()
+        super().__init__(options)
+
+    def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]':
+        sym = self.lookup_fully_qualified(fullname)
+        if sym and isinstance(sym.node, TypeInfo):  # pragma: no branch
+            # No branching may occur if the mypy cache has not been cleared
+            if any(get_fullname(base) == BASEMODEL_FULLNAME for base in sym.node.mro):
+                return self._pydantic_model_class_maker_callback
+        return None
+
+    def get_metaclass_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]:
+        if fullname == MODEL_METACLASS_FULLNAME:
+            return self._pydantic_model_metaclass_marker_callback
+        return None
+
+    def get_function_hook(self, fullname: str) -> 'Optional[Callable[[FunctionContext], Type]]':
+        sym = self.lookup_fully_qualified(fullname)
+        if sym and sym.fullname == FIELD_FULLNAME:
+            return self._pydantic_field_callback
+        return None
+
+    def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]:
+        if fullname.endswith('.from_orm'):
+            return from_orm_callback
+        return None
+
+    def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]:
+        """Mark pydantic.dataclasses as dataclass.
+
+        Mypy version 1.1.1 added support for `@dataclass_transform` decorator.
+        """
+        if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1):
+            return dataclasses.dataclass_class_maker_callback  # type: ignore[return-value]
+        return None
+
+    def report_config_data(self, ctx: ReportConfigContext) -> Dict[str, Any]:
+        """Return all plugin config data.
+
+        Used by mypy to determine if cache needs to be discarded.
+        """
+        return self._plugin_data
+
+    def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None:
+        transformer = PydanticModelTransformer(ctx, self.plugin_config)
+        transformer.transform()
+
+    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
+        """Reset dataclass_transform_spec attribute of ModelMetaclass.
+
+        Let the plugin handle it. This behavior can be disabled
+        if 'debug_dataclass_transform' is set to True, for testing purposes.
+        """
+        if self.plugin_config.debug_dataclass_transform:
+            return
+        info_metaclass = ctx.cls.info.declared_metaclass
+        assert info_metaclass, "callback not passed from 'get_metaclass_hook'"
+        if getattr(info_metaclass.type, 'dataclass_transform_spec', None):
+            info_metaclass.type.dataclass_transform_spec = None  # type: ignore[attr-defined]
+
+    def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type':
+        """
+        Extract the type of the `default` argument from the Field function, and use it as the return type.
+
+        In particular:
+        * Check whether the default and default_factory arguments are specified.
+        * Output an error if both are specified.
+        * Retrieve the type of the argument which is specified, and use it as return type for the function.
+        """
+        default_any_type = ctx.default_return_type
+
+        assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()'
+        assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()'
+        default_args = ctx.args[0]
+        default_factory_args = ctx.args[1]
+
+        if default_args and default_factory_args:
+            error_default_and_default_factory_specified(ctx.api, ctx.context)
+            return default_any_type
+
+        if default_args:
+            default_type = ctx.arg_types[0][0]
+            default_arg = default_args[0]
+
+            # Fallback to default Any type if the field is required
+            if not isinstance(default_arg, EllipsisExpr):
+                return default_type
+
+        elif default_factory_args:
+            default_factory_type = ctx.arg_types[1][0]
+
+            # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter
+            # Pydantic calls the default factory without any argument, so we retrieve the first item
+            if isinstance(default_factory_type, Overloaded):
+                if MYPY_VERSION_TUPLE > (0, 910):
+                    default_factory_type = default_factory_type.items[0]
+                else:
+                    # Mypy 0.910 exposes the items of overloaded types via a method call
+                    default_factory_type = default_factory_type.items()[0]  # type: ignore[operator]
+
+            if isinstance(default_factory_type, CallableType):
+                ret_type = default_factory_type.ret_type
+                # mypy doesn't think `ret_type` has `args`, even though you'd expect it to know;
+                # add this check in case it varies by version
+                args = getattr(ret_type, 'args', None)
+                if args:
+                    if all(isinstance(arg, TypeVarType) for arg in args):
+                        # Looks like the default factory is a type like `list` or `dict`, replace all args with `Any`
+                        ret_type.args = tuple(default_any_type for _ in args)  # type: ignore[attr-defined]
+                return ret_type
+
+        return default_any_type
+
+
+class PydanticPluginConfig:
+    __slots__ = (
+        'init_forbid_extra',
+        'init_typed',
+        'warn_required_dynamic_aliases',
+        'warn_untyped_fields',
+        'debug_dataclass_transform',
+    )
+    init_forbid_extra: bool
+    init_typed: bool
+    warn_required_dynamic_aliases: bool
+    warn_untyped_fields: bool
+    debug_dataclass_transform: bool  # undocumented
+
+    def __init__(self, options: Options) -> None:
+        if options.config_file is None:  # pragma: no cover
+            return
+
+        toml_config = parse_toml(options.config_file)
+        if toml_config is not None:
+            config = toml_config.get('tool', {}).get('pydantic-mypy', {})
+            for key in self.__slots__:
+                setting = config.get(key, False)
+                if not isinstance(setting, bool):
+                    raise ValueError(f'Configuration value must be a boolean for key: {key}')
+                setattr(self, key, setting)
+        else:
+            plugin_config = ConfigParser()
+            plugin_config.read(options.config_file)
+            for key in self.__slots__:
+                setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False)
+                setattr(self, key, setting)
+
+    def to_data(self) -> Dict[str, Any]:
+        return {key: getattr(self, key) for key in self.__slots__}
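+
+    # Illustrative sketch: the settings read above can come from either supported config
+    # format, e.g. in pyproject.toml
+    #
+    #     [tool.pydantic-mypy]
+    #     init_typed = true
+    #     init_forbid_extra = true
+    #     warn_required_dynamic_aliases = true
+    #     warn_untyped_fields = true
+    #
+    # or in a ConfigParser-style mypy config file (mypy.ini / setup.cfg):
+    #
+    #     [pydantic-mypy]
+    #     init_typed = True
+    #     warn_untyped_fields = True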
+
+
+def from_orm_callback(ctx: MethodContext) -> Type:
+    """
+    Raise an error if orm_mode is not enabled
+    """
+    model_type: Instance
+    ctx_type = ctx.type
+    if isinstance(ctx_type, TypeType):
+        ctx_type = ctx_type.item
+    if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance):
+        model_type = ctx_type.ret_type  # called on the class
+    elif isinstance(ctx_type, Instance):
+        model_type = ctx_type  # called on an instance (unusual, but still valid)
+    else:  # pragma: no cover
+        detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})'
+        error_unexpected_behavior(detail, ctx.api, ctx.context)
+        return ctx.default_return_type
+    pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
+    if pydantic_metadata is None:
+        return ctx.default_return_type
+    orm_mode = pydantic_metadata.get('config', {}).get('orm_mode')
+    if orm_mode is not True:
+        error_from_orm(get_name(model_type.type), ctx.api, ctx.context)
+    return ctx.default_return_type
+
+
+class PydanticModelTransformer:
+    tracked_config_fields: Set[str] = {
+        'extra',
+        'allow_mutation',
+        'frozen',
+        'orm_mode',
+        'allow_population_by_field_name',
+        'alias_generator',
+    }
+
+    def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None:
+        self._ctx = ctx
+        self.plugin_config = plugin_config
+
+    def transform(self) -> None:
+        """
+        Configures the BaseModel subclass according to the plugin settings.
+
+        In particular:
+        * determines the model config and fields,
+        * adds a fields-aware signature for the initializer and construct methods
+        * freezes the class if allow_mutation = False or frozen = True
+        * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses
+        """
+        ctx = self._ctx
+        info = ctx.cls.info
+
+        self.adjust_validator_signatures()
+        config = self.collect_config()
+        fields = self.collect_fields(config)
+        is_settings = any(get_fullname(base) == BASESETTINGS_FULLNAME for base in info.mro[:-1])
+        self.add_initializer(fields, config, is_settings)
+        self.add_construct_method(fields)
+        self.set_frozen(fields, frozen=config.allow_mutation is False or config.frozen is True)
+        info.metadata[METADATA_KEY] = {
+            'fields': {field.name: field.serialize() for field in fields},
+            'config': config.set_values_dict(),
+        }
+
+    def adjust_validator_signatures(self) -> None:
+        """When we decorate a function `f` with `pydantic.validator(...)`, mypy sees
+        `f` as a regular method taking a `self` instance, even though pydantic
+        internally wraps `f` with `classmethod` if necessary.
+
+        Teach mypy this by marking any function whose outermost decorator is a
+        `validator()` call as a classmethod.
+        """
+        for name, sym in self._ctx.cls.info.names.items():
+            if isinstance(sym.node, Decorator):
+                first_dec = sym.node.original_decorators[0]
+                if (
+                    isinstance(first_dec, CallExpr)
+                    and isinstance(first_dec.callee, NameExpr)
+                    and first_dec.callee.fullname == f'{_NAMESPACE}.class_validators.validator'
+                ):
+                    sym.node.func.is_class = True
+
+    def collect_config(self) -> 'ModelConfigData':
+        """
+        Collects the values of the config attributes that are used by the plugin, accounting for parent classes.
+        """
+        ctx = self._ctx
+        cls = ctx.cls
+        config = ModelConfigData()
+        for stmt in cls.defs.body:
+            if not isinstance(stmt, ClassDef):
+                continue
+            if stmt.name == 'Config':
+                for substmt in stmt.defs.body:
+                    if not isinstance(substmt, AssignmentStmt):
+                        continue
+                    config.update(self.get_config_update(substmt))
+                if (
+                    config.has_alias_generator
+                    and not config.allow_population_by_field_name
+                    and self.plugin_config.warn_required_dynamic_aliases
+                ):
+                    error_required_dynamic_aliases(ctx.api, stmt)
+        for info in cls.info.mro[1:]:  # 0 is the current class
+            if METADATA_KEY not in info.metadata:
+                continue
+
+            # Each class depends on the set of fields in its ancestors
+            ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info)))
+            for name, value in info.metadata[METADATA_KEY]['config'].items():
+                config.setdefault(name, value)
+        return config
+
+    def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']:
+        """
+        Collects the fields for the model, accounting for parent classes
+        """
+        # First, collect fields belonging to the current class.
+        ctx = self._ctx
+        cls = self._ctx.cls
+        fields = []  # type: List[PydanticModelField]
+        known_fields = set()  # type: Set[str]
+        for stmt in cls.defs.body:
+            if not isinstance(stmt, AssignmentStmt):  # `and stmt.new_syntax` to require annotation
+                continue
+
+            lhs = stmt.lvalues[0]
+            if not isinstance(lhs, NameExpr) or not is_valid_field(lhs.name):
+                continue
+
+            if not stmt.new_syntax and self.plugin_config.warn_untyped_fields:
+                error_untyped_fields(ctx.api, stmt)
+
+            # if lhs.name == '__config__':  # BaseConfig not well handled; I'm not sure why yet
+            #     continue
+
+            sym = cls.info.names.get(lhs.name)
+            if sym is None:  # pragma: no cover
+                # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation)
+                # This is the same logic used in the dataclasses plugin
+                continue
+
+            node = sym.node
+            if isinstance(node, PlaceholderNode):  # pragma: no cover
+                # See the PlaceholderNode docstring for more detail about how this can occur
+                # Basically, it is an edge case when dealing with complex import logic
+                # This is the same logic used in the dataclasses plugin
+                continue
+            if not isinstance(node, Var):  # pragma: no cover
+                # Don't know if this edge case still happens with the `is_valid_field` check above
+                # but better safe than sorry
+                continue
+
+            # x: ClassVar[int] is ignored by dataclasses.
+            if node.is_classvar:
+                continue
+
+            is_required = self.get_is_required(cls, stmt, lhs)
+            alias, has_dynamic_alias = self.get_alias_info(stmt)
+            if (
+                has_dynamic_alias
+                and not model_config.allow_population_by_field_name
+                and self.plugin_config.warn_required_dynamic_aliases
+            ):
+                error_required_dynamic_aliases(ctx.api, stmt)
+            fields.append(
+                PydanticModelField(
+                    name=lhs.name,
+                    is_required=is_required,
+                    alias=alias,
+                    has_dynamic_alias=has_dynamic_alias,
+                    line=stmt.line,
+                    column=stmt.column,
+                )
+            )
+            known_fields.add(lhs.name)
+        all_fields = fields.copy()
+        for info in cls.info.mro[1:]:  # 0 is the current class, -2 is BaseModel, -1 is object
+            if METADATA_KEY not in info.metadata:
+                continue
+
+            superclass_fields = []
+            # Each class depends on the set of fields in its ancestors
+            ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info)))
+
+            for name, data in info.metadata[METADATA_KEY]['fields'].items():
+                if name not in known_fields:
+                    field = PydanticModelField.deserialize(info, data)
+                    known_fields.add(name)
+                    superclass_fields.append(field)
+                else:
+                    (field,) = (a for a in all_fields if a.name == name)
+                    all_fields.remove(field)
+                    superclass_fields.append(field)
+            all_fields = superclass_fields + all_fields
+        return all_fields
+
+    def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None:
+        """
+        Adds a fields-aware `__init__` method to the class.
+
+        The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.
+        """
+        ctx = self._ctx
+        typed = self.plugin_config.init_typed
+        use_alias = config.allow_population_by_field_name is not True
+        force_all_optional = is_settings or bool(
+            config.has_alias_generator and not config.allow_population_by_field_name
+        )
+        init_arguments = self.get_field_arguments(
+            fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias
+        )
+        if not self.should_init_forbid_extra(fields, config):
+            var = Var('kwargs')
+            init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))
+
+        if '__init__' not in ctx.cls.info.names:
+            add_method(ctx, '__init__', init_arguments, NoneType())
+
+    def add_construct_method(self, fields: List['PydanticModelField']) -> None:
+        """
+        Adds a fully typed `construct` classmethod to the class.
+
+        Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
+        and does not treat settings fields as optional.
+        """
+        ctx = self._ctx
+        set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set', [ctx.api.named_type(f'{BUILTINS_NAME}.str')])
+        optional_set_str = UnionType([set_str, NoneType()])
+        fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
+        construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False)
+        construct_arguments = [fields_set_argument] + construct_arguments
+
+        obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object')
+        self_tvar_name = '_PydanticBaseModel'  # Make sure it does not conflict with other names in the class
+        tvar_fullname = ctx.cls.fullname + '.' + self_tvar_name
+        if MYPY_VERSION_TUPLE >= (1, 4):
+            tvd = TypeVarType(
+                self_tvar_name,
+                tvar_fullname,
+                (
+                    TypeVarId(-1, namespace=ctx.cls.fullname + '.construct')
+                    if MYPY_VERSION_TUPLE >= (1, 11)
+                    else TypeVarId(-1)
+                ),
+                [],
+                obj_type,
+                AnyType(TypeOfAny.from_omitted_generics),  # type: ignore[arg-type]
+            )
+            self_tvar_expr = TypeVarExpr(
+                self_tvar_name,
+                tvar_fullname,
+                [],
+                obj_type,
+                AnyType(TypeOfAny.from_omitted_generics),  # type: ignore[arg-type]
+            )
+        else:
+            tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type)
+            self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type)
+        ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr)
+
+        # Backward-compatible with TypeVarDef from Mypy 0.910.
+        if isinstance(tvd, TypeVarType):
+            self_type = tvd
+        else:
+            self_type = TypeVarType(tvd)
+
+        add_method(
+            ctx,
+            'construct',
+            construct_arguments,
+            return_type=self_type,
+            self_type=self_type,
+            tvar_def=tvd,
+            is_classmethod=True,
+        )
+
+    def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None:
+        """
+        Marks all fields as properties so that attempts to set them trigger mypy errors.
+
+        This is the same approach used by the attrs and dataclasses plugins.
+        """
+        ctx = self._ctx
+        info = ctx.cls.info
+        for field in fields:
+            sym_node = info.names.get(field.name)
+            if sym_node is not None:
+                var = sym_node.node
+                if isinstance(var, Var):
+                    var.is_property = frozen
+                elif isinstance(var, PlaceholderNode) and not ctx.api.final_iteration:
+                    # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
+                    ctx.api.defer()
+                else:  # pragma: no cover
+                    # I don't know whether it's possible to hit this branch, but I've added it for safety
+                    try:
+                        var_str = str(var)
+                    except TypeError:
+                        # This happens for PlaceholderNode; perhaps it will happen for other types in the future.
+                        var_str = repr(var)
+                    detail = f'sym_node.node: {var_str} (of type {var.__class__})'
+                    error_unexpected_behavior(detail, ctx.api, ctx.cls)
+            else:
+                var = field.to_var(info, use_alias=False)
+                var.info = info
+                var.is_property = frozen
+                var._fullname = get_fullname(info) + '.' + get_name(var)
+                info.names[get_name(var)] = SymbolTableNode(MDEF, var)
+
+    def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']:
+        """
+        Determines the config update due to a single statement in the Config class definition.
+
+        Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int).
+        """
+        lhs = substmt.lvalues[0]
+        if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields):
+            return None
+        if lhs.name == 'extra':
+            if isinstance(substmt.rvalue, StrExpr):
+                forbid_extra = substmt.rvalue.value == 'forbid'
+            elif isinstance(substmt.rvalue, MemberExpr):
+                forbid_extra = substmt.rvalue.name == 'forbid'
+            else:
+                error_invalid_config_value(lhs.name, self._ctx.api, substmt)
+                return None
+            return ModelConfigData(forbid_extra=forbid_extra)
+        if lhs.name == 'alias_generator':
+            has_alias_generator = True
+            if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 'builtins.None':
+                has_alias_generator = False
+            return ModelConfigData(has_alias_generator=has_alias_generator)
+        if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'):
+            return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'})
+        error_invalid_config_value(lhs.name, self._ctx.api, substmt)
+        return None
+
+    @staticmethod
+    def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool:
+        """
+        Returns a boolean indicating whether the field defined in `stmt` is a required field.
+        """
+        expr = stmt.rvalue
+        if isinstance(expr, TempNode):
+            # TempNode means annotation-only, so only non-required if Optional
+            value_type = get_proper_type(cls.info[lhs.name].type)
+            return not PydanticModelTransformer.type_has_implicit_default(value_type)
+        if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME:
+            # The "default value" is a call to `Field`; at this point, the field is
+            # required only if the default is Ellipsis (i.e., `field_name: Annotation = Field(...)`)
+            # and no default_factory is specified.
+            for arg, name in zip(expr.args, expr.arg_names):
+                # If name is None, then this arg is the default because it is the only positional argument.
+                if name is None or name == 'default':
+                    return arg.__class__ is EllipsisExpr
+                if name == 'default_factory':
+                    return False
+            # In this case, default and default_factory are not specified, so we need to look at the annotation
+            value_type = get_proper_type(cls.info[lhs.name].type)
+            return not PydanticModelTransformer.type_has_implicit_default(value_type)
+        # Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
+        return isinstance(expr, EllipsisExpr)
+
+    @staticmethod
+    def type_has_implicit_default(type_: Optional[ProperType]) -> bool:
+        """
+        Returns True if the passed type will be given an implicit default value.
+
+        In pydantic v1, this is the case for Optional types and Any (with default value None).
+        """
+        if isinstance(type_, AnyType):
+            # Annotated as Any
+            return True
+        if isinstance(type_, UnionType) and any(
+            isinstance(item, NoneType) or isinstance(item, AnyType) for item in type_.items
+        ):
+            # Annotated as Optional, or otherwise having NoneType or AnyType in the union
+            return True
+        return False
+
+    @staticmethod
+    def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]:
+        """
+        Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.
+
+        `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal.
+        If `has_dynamic_alias` is True, `alias` will be None.
+        """
+        expr = stmt.rvalue
+        if isinstance(expr, TempNode):
+            # TempNode means annotation-only
+            return None, False
+
+        if not (
+            isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
+        ):
+            # Assigned value is not a call to pydantic.fields.Field
+            return None, False
+
+        for i, arg_name in enumerate(expr.arg_names):
+            if arg_name != 'alias':
+                continue
+            arg = expr.args[i]
+            if isinstance(arg, StrExpr):
+                return arg.value, False
+            else:
+                return None, True
+        return None, False
+
+    def get_field_arguments(
+        self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool
+    ) -> List[Argument]:
+        """
+        Helper function used during the construction of the `__init__` and `construct` method signatures.
+
+        Returns a list of mypy Argument instances for use in the generated signatures.
+        """
+        info = self._ctx.cls.info
+        arguments = [
+            field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias)
+            for field in fields
+            if not (use_alias and field.has_dynamic_alias)
+        ]
+        return arguments
+
+    def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool:
+        """
+        Indicates whether the generated `__init__` should forbid extra kwargs (i.e., omit the trailing `**kwargs` from its signature).
+
+        We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
+        *unless* a required dynamic alias is present (since then we can't determine a valid signature).
+        """
+        if not config.allow_population_by_field_name:
+            if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)):
+                return False
+        if config.forbid_extra:
+            return True
+        return self.plugin_config.init_forbid_extra
+
+    @staticmethod
+    def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool:
+        """
+        Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be
+        determined during static analysis.
+        """
+        for field in fields:
+            if field.has_dynamic_alias:
+                return True
+        if has_alias_generator:
+            for field in fields:
+                if field.alias is None:
+                    return True
+        return False
+
+
+class PydanticModelField:
+    def __init__(
+        self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int
+    ):
+        self.name = name
+        self.is_required = is_required
+        self.alias = alias
+        self.has_dynamic_alias = has_dynamic_alias
+        self.line = line
+        self.column = column
+
+    def to_var(self, info: TypeInfo, use_alias: bool) -> Var:
+        name = self.name
+        if use_alias and self.alias is not None:
+            name = self.alias
+        return Var(name, info[self.name].type)
+
+    def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument:
+        if typed and info[self.name].type is not None:
+            type_annotation = info[self.name].type
+        else:
+            type_annotation = AnyType(TypeOfAny.explicit)
+        return Argument(
+            variable=self.to_var(info, use_alias),
+            type_annotation=type_annotation,
+            initializer=None,
+            kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED,
+        )
+
+    def serialize(self) -> JsonDict:
+        return self.__dict__
+
+    @classmethod
+    def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField':
+        return cls(**data)
+
+
+class ModelConfigData:
+    def __init__(
+        self,
+        forbid_extra: Optional[bool] = None,
+        allow_mutation: Optional[bool] = None,
+        frozen: Optional[bool] = None,
+        orm_mode: Optional[bool] = None,
+        allow_population_by_field_name: Optional[bool] = None,
+        has_alias_generator: Optional[bool] = None,
+    ):
+        self.forbid_extra = forbid_extra
+        self.allow_mutation = allow_mutation
+        self.frozen = frozen
+        self.orm_mode = orm_mode
+        self.allow_population_by_field_name = allow_population_by_field_name
+        self.has_alias_generator = has_alias_generator
+
+    def set_values_dict(self) -> Dict[str, Any]:
+        return {k: v for k, v in self.__dict__.items() if v is not None}
+
+    def update(self, config: Optional['ModelConfigData']) -> None:
+        if config is None:
+            return
+        for k, v in config.set_values_dict().items():
+            setattr(self, k, v)
+
+    def setdefault(self, key: str, value: Any) -> None:
+        if getattr(self, key) is None:
+            setattr(self, key, value)
+
+
+ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic')
+ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
+ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
+ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic')
+ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic')
+ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic')
+
+
+def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
+    api.fail(f'"{model_name}" does not have orm_mode=True', context, code=ERROR_ORM)
+
+
+def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
+    api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG)
+
+
+def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
+    api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS)
+
+
+def error_unexpected_behavior(
+    detail: str, api: Union[CheckerPluginInterface, SemanticAnalyzerPluginInterface], context: Context
+) -> None:  # pragma: no cover
+    # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
+    link = 'https://github.com/pydantic/pydantic/issues/new/choose'
+    full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
+    full_message += f'Please consider reporting this bug at {link} so we can try to fix it!'
+    api.fail(full_message, context, code=ERROR_UNEXPECTED)
+
+
+def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
+    api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED)
+
+
+def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None:
+    api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS)
+
+
+def add_method(
+    ctx: ClassDefContext,
+    name: str,
+    args: List[Argument],
+    return_type: Type,
+    self_type: Optional[Type] = None,
+    tvar_def: Optional[TypeVarDef] = None,
+    is_classmethod: bool = False,
+    is_new: bool = False,
+    # is_staticmethod: bool = False,
+) -> None:
+    """
+    Adds a new method to a class.
+
+    This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged
+    """
+    info = ctx.cls.info
+
+    # First remove any previously generated methods with the same name
+    # to avoid clashes and problems in the semantic analyzer.
+    if name in info.names:
+        sym = info.names[name]
+        if sym.plugin_generated and isinstance(sym.node, FuncDef):
+            ctx.cls.defs.body.remove(sym.node)  # pragma: no cover
+
+    self_type = self_type or fill_typevars(info)
+    if is_classmethod or is_new:
+        first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)]
+    # elif is_staticmethod:
+    #     first = []
+    else:
+        self_type = self_type or fill_typevars(info)
+        first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
+    args = first + args
+    arg_types, arg_names, arg_kinds = [], [], []
+    for arg in args:
+        assert arg.type_annotation, 'All arguments must be fully typed.'
+        arg_types.append(arg.type_annotation)
+        arg_names.append(get_name(arg.variable))
+        arg_kinds.append(arg.kind)
+
+    function_type = ctx.api.named_type(f'{BUILTINS_NAME}.function')
+    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
+    if tvar_def:
+        signature.variables = [tvar_def]
+
+    func = FuncDef(name, args, Block([PassStmt()]))
+    func.info = info
+    func.type = set_callable_name(signature, func)
+    func.is_class = is_classmethod
+    # func.is_static = is_staticmethod
+    func._fullname = get_fullname(info) + '.' + name
+    func.line = info.line
+
+    # NOTE: we would like the plugin generated node to dominate, but we still
+    # need to keep any existing definitions so they get semantically analyzed.
+    if name in info.names:
+        # Get a nice unique name instead.
+        r_name = get_unique_redefinition_name(name, info.names)
+        info.names[r_name] = info.names[name]
+
+    if is_classmethod:  # or is_staticmethod:
+        func.is_decorated = True
+        v = Var(name, func.type)
+        v.info = info
+        v._fullname = func._fullname
+        # if is_classmethod:
+        v.is_classmethod = True
+        dec = Decorator(func, [NameExpr('classmethod')], v)
+        # else:
+        #     v.is_staticmethod = True
+        #     dec = Decorator(func, [NameExpr('staticmethod')], v)
+
+        dec.line = info.line
+        sym = SymbolTableNode(MDEF, dec)
+    else:
+        sym = SymbolTableNode(MDEF, func)
+    sym.plugin_generated = True
+
+    info.names[name] = sym
+    info.defn.defs.body.append(func)
+
+
+def get_fullname(x: Union[FuncBase, SymbolNode]) -> str:
+    """
+    Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped.
+    """
+    fn = x.fullname
+    if callable(fn):  # pragma: no cover
+        return fn()
+    return fn
+
+
+def get_name(x: Union[FuncBase, SymbolNode]) -> str:
+    """
+    Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped.
+    """
+    fn = x.name
+    if callable(fn):  # pragma: no cover
+        return fn()
+    return fn
+
+
+def parse_toml(config_file: str) -> Optional[Dict[str, Any]]:
+    if not config_file.endswith('.toml'):
+        return None
+
+    read_mode = 'rb'
+    if sys.version_info >= (3, 11):
+        import tomllib as toml_
+    else:
+        try:
+            import tomli as toml_
+        except ImportError:
+            # older versions of mypy have toml as a dependency, not tomli
+            read_mode = 'r'
+            try:
+                import toml as toml_  # type: ignore[no-redef]
+            except ImportError:  # pragma: no cover
+                import warnings
+
+                warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
+                return None
+
+    with open(config_file, read_mode) as rf:
+        return toml_.load(rf)  # type: ignore[arg-type]
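
The `parse_toml` helper above is how the plugin loads its settings from `pyproject.toml`.
A minimal sketch of reading those settings, assuming the documented `[tool.pydantic-mypy]`
section; the file path and option values below are illustrative, not part of this file:

    # Sketch: reading the plugin's configuration (requires mypy to be installed,
    # since pydantic.v1.mypy imports from mypy itself).
    from pydantic.v1.mypy import parse_toml

    config = parse_toml('pyproject.toml')  # returns None unless the filename ends in .toml
    if config is not None:
        settings = config.get('tool', {}).get('pydantic-mypy', {})
        # e.g. {'init_typed': True, 'init_forbid_extra': True, 'warn_untyped_fields': True}
        print(settings)
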
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/networks.py b/.venv/lib/python3.12/site-packages/pydantic/v1/networks.py
new file mode 100644
index 00000000..ba07b748
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/networks.py
@@ -0,0 +1,747 @@
+import re
+from ipaddress import (
+    IPv4Address,
+    IPv4Interface,
+    IPv4Network,
+    IPv6Address,
+    IPv6Interface,
+    IPv6Network,
+    _BaseAddress,
+    _BaseNetwork,
+)
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Collection,
+    Dict,
+    Generator,
+    List,
+    Match,
+    Optional,
+    Pattern,
+    Set,
+    Tuple,
+    Type,
+    Union,
+    cast,
+    no_type_check,
+)
+
+from pydantic.v1 import errors
+from pydantic.v1.utils import Representation, update_not_none
+from pydantic.v1.validators import constr_length_validator, str_validator
+
+if TYPE_CHECKING:
+    import email_validator
+    from typing_extensions import TypedDict
+
+    from pydantic.v1.config import BaseConfig
+    from pydantic.v1.fields import ModelField
+    from pydantic.v1.typing import AnyCallable
+
+    CallableGenerator = Generator[AnyCallable, None, None]
+
+    class Parts(TypedDict, total=False):
+        scheme: str
+        user: Optional[str]
+        password: Optional[str]
+        ipv4: Optional[str]
+        ipv6: Optional[str]
+        domain: Optional[str]
+        port: Optional[str]
+        path: Optional[str]
+        query: Optional[str]
+        fragment: Optional[str]
+
+    class HostParts(TypedDict, total=False):
+        host: str
+        tld: Optional[str]
+        host_type: Optional[str]
+        port: Optional[str]
+        rebuild: bool
+
+else:
+    email_validator = None
+
+    class Parts(dict):
+        pass
+
+
+NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]]
+
+__all__ = [
+    'AnyUrl',
+    'AnyHttpUrl',
+    'FileUrl',
+    'HttpUrl',
+    'stricturl',
+    'EmailStr',
+    'NameEmail',
+    'IPvAnyAddress',
+    'IPvAnyInterface',
+    'IPvAnyNetwork',
+    'PostgresDsn',
+    'CockroachDsn',
+    'AmqpDsn',
+    'RedisDsn',
+    'MongoDsn',
+    'KafkaDsn',
+    'validate_email',
+]
+
+_url_regex_cache = None
+_multi_host_url_regex_cache = None
+_ascii_domain_regex_cache = None
+_int_domain_regex_cache = None
+_host_regex_cache = None
+
+_host_regex = (
+    r'(?:'
+    r'(?P<ipv4>(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|'  # ipv4
+    r'(?P<ipv6>\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|'  # ipv6
+    r'(?P<domain>[^\s/:?#]+)'  # domain, validation occurs later
+    r')?'
+    r'(?::(?P<port>\d+))?'  # port
+)
+_scheme_regex = r'(?:(?P<scheme>[a-z][a-z0-9+\-.]+)://)?'  # scheme https://tools.ietf.org/html/rfc3986#appendix-A
+_user_info_regex = r'(?:(?P<user>[^\s:/]*)(?::(?P<password>[^\s/]*))?@)?'
+_path_regex = r'(?P<path>/[^\s?#]*)?'
+_query_regex = r'(?:\?(?P<query>[^\s#]*))?'
+_fragment_regex = r'(?:#(?P<fragment>[^\s#]*))?'
+
+
+def url_regex() -> Pattern[str]:
+    global _url_regex_cache
+    if _url_regex_cache is None:
+        _url_regex_cache = re.compile(
+            rf'{_scheme_regex}{_user_info_regex}{_host_regex}{_path_regex}{_query_regex}{_fragment_regex}',
+            re.IGNORECASE,
+        )
+    return _url_regex_cache
+
+
+def multi_host_url_regex() -> Pattern[str]:
+    """
+    Compiled multi host url regex.
+
+    In addition to what `url_regex` matches, it allows multiple comma-separated hosts,
+    e.g. host1.db.net,host2.db.net
+    """
+    global _multi_host_url_regex_cache
+    if _multi_host_url_regex_cache is None:
+        _multi_host_url_regex_cache = re.compile(
+            rf'{_scheme_regex}{_user_info_regex}'
+            r'(?P<hosts>([^/]*))'  # validation occurs later
+            rf'{_path_regex}{_query_regex}{_fragment_regex}',
+            re.IGNORECASE,
+        )
+    return _multi_host_url_regex_cache
+
+
+def ascii_domain_regex() -> Pattern[str]:
+    global _ascii_domain_regex_cache
+    if _ascii_domain_regex_cache is None:
+        ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?'
+        ascii_domain_ending = r'(?P<tld>\.[a-z]{2,63})?\.?'
+        _ascii_domain_regex_cache = re.compile(
+            fr'(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}', re.IGNORECASE
+        )
+    return _ascii_domain_regex_cache
+
+
+def int_domain_regex() -> Pattern[str]:
+    global _int_domain_regex_cache
+    if _int_domain_regex_cache is None:
+        int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?'
+        int_domain_ending = r'(?P<tld>(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?'
+        _int_domain_regex_cache = re.compile(fr'(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}', re.IGNORECASE)
+    return _int_domain_regex_cache
+
+
+def host_regex() -> Pattern[str]:
+    global _host_regex_cache
+    if _host_regex_cache is None:
+        _host_regex_cache = re.compile(
+            _host_regex,
+            re.IGNORECASE,
+        )
+    return _host_regex_cache
+
+
+class AnyUrl(str):
+    strip_whitespace = True
+    min_length = 1
+    max_length = 2**16
+    allowed_schemes: Optional[Collection[str]] = None
+    tld_required: bool = False
+    user_required: bool = False
+    host_required: bool = True
+    hidden_parts: Set[str] = set()
+
+    __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment')
+
+    @no_type_check
+    def __new__(cls, url: Optional[str], **kwargs) -> object:
+        return str.__new__(cls, cls.build(**kwargs) if url is None else url)
+
+    def __init__(
+        self,
+        url: str,
+        *,
+        scheme: str,
+        user: Optional[str] = None,
+        password: Optional[str] = None,
+        host: Optional[str] = None,
+        tld: Optional[str] = None,
+        host_type: str = 'domain',
+        port: Optional[str] = None,
+        path: Optional[str] = None,
+        query: Optional[str] = None,
+        fragment: Optional[str] = None,
+    ) -> None:
+        str.__init__(url)
+        self.scheme = scheme
+        self.user = user
+        self.password = password
+        self.host = host
+        self.tld = tld
+        self.host_type = host_type
+        self.port = port
+        self.path = path
+        self.query = query
+        self.fragment = fragment
+
+    @classmethod
+    def build(
+        cls,
+        *,
+        scheme: str,
+        user: Optional[str] = None,
+        password: Optional[str] = None,
+        host: str,
+        port: Optional[str] = None,
+        path: Optional[str] = None,
+        query: Optional[str] = None,
+        fragment: Optional[str] = None,
+        **_kwargs: str,
+    ) -> str:
+        parts = Parts(
+            scheme=scheme,
+            user=user,
+            password=password,
+            host=host,
+            port=port,
+            path=path,
+            query=query,
+            fragment=fragment,
+            **_kwargs,  # type: ignore[misc]
+        )
+
+        url = scheme + '://'
+        if user:
+            url += user
+        if password:
+            url += ':' + password
+        if user or password:
+            url += '@'
+        url += host
+        if port and ('port' not in cls.hidden_parts or cls.get_default_parts(parts).get('port') != port):
+            url += ':' + port
+        if path:
+            url += path
+        if query:
+            url += '?' + query
+        if fragment:
+            url += '#' + fragment
+        return url
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format='uri')
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl':
+        if value.__class__ == cls:
+            return value
+        value = str_validator(value)
+        if cls.strip_whitespace:
+            value = value.strip()
+        url: str = cast(str, constr_length_validator(value, field, config))
+
+        m = cls._match_url(url)
+        # the regex should always match; if it doesn't, please report it with details of the URL tried
+        assert m, 'URL regex failed unexpectedly'
+
+        original_parts = cast('Parts', m.groupdict())
+        parts = cls.apply_default_parts(original_parts)
+        parts = cls.validate_parts(parts)
+
+        if m.end() != len(url):
+            raise errors.UrlExtraError(extra=url[m.end() :])
+
+        return cls._build_url(m, url, parts)
+
+    @classmethod
+    def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'AnyUrl':
+        """
+        Validate hosts and build the AnyUrl object. Split from `validate` so this method
+        can be altered in `MultiHostDsn`.
+        """
+        host, tld, host_type, rebuild = cls.validate_host(parts)
+
+        return cls(
+            None if rebuild else url,
+            scheme=parts['scheme'],
+            user=parts['user'],
+            password=parts['password'],
+            host=host,
+            tld=tld,
+            host_type=host_type,
+            port=parts['port'],
+            path=parts['path'],
+            query=parts['query'],
+            fragment=parts['fragment'],
+        )
+
+    @staticmethod
+    def _match_url(url: str) -> Optional[Match[str]]:
+        return url_regex().match(url)
+
+    @staticmethod
+    def _validate_port(port: Optional[str]) -> None:
+        if port is not None and int(port) > 65_535:
+            raise errors.UrlPortError()
+
+    @classmethod
+    def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts':
+        """
+        Validate the parts of a URL.
+        Can be overridden to set default values for missing parts.
+        """
+        scheme = parts['scheme']
+        if scheme is None:
+            raise errors.UrlSchemeError()
+
+        if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes:
+            raise errors.UrlSchemePermittedError(set(cls.allowed_schemes))
+
+        if validate_port:
+            cls._validate_port(parts['port'])
+
+        user = parts['user']
+        if cls.user_required and user is None:
+            raise errors.UrlUserInfoError()
+
+        return parts
+
+    @classmethod
+    def validate_host(cls, parts: 'Parts') -> Tuple[str, Optional[str], str, bool]:
+        tld, host_type, rebuild = None, None, False
+        for f in ('domain', 'ipv4', 'ipv6'):
+            host = parts[f]  # type: ignore[literal-required]
+            if host:
+                host_type = f
+                break
+
+        if host is None:
+            if cls.host_required:
+                raise errors.UrlHostError()
+        elif host_type == 'domain':
+            is_international = False
+            d = ascii_domain_regex().fullmatch(host)
+            if d is None:
+                d = int_domain_regex().fullmatch(host)
+                if d is None:
+                    raise errors.UrlHostError()
+                is_international = True
+
+            tld = d.group('tld')
+            if tld is None and not is_international:
+                d = int_domain_regex().fullmatch(host)
+                assert d is not None
+                tld = d.group('tld')
+                is_international = True
+
+            if tld is not None:
+                tld = tld[1:]
+            elif cls.tld_required:
+                raise errors.UrlHostTldError()
+
+            if is_international:
+                host_type = 'int_domain'
+                rebuild = True
+                host = host.encode('idna').decode('ascii')
+                if tld is not None:
+                    tld = tld.encode('idna').decode('ascii')
+
+        return host, tld, host_type, rebuild  # type: ignore
+
+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {}
+
+    @classmethod
+    def apply_default_parts(cls, parts: 'Parts') -> 'Parts':
+        for key, value in cls.get_default_parts(parts).items():
+            if not parts[key]:  # type: ignore[literal-required]
+                parts[key] = value  # type: ignore[literal-required]
+        return parts
+
+    def __repr__(self) -> str:
+        extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None)
+        return f'{self.__class__.__name__}({super().__repr__()}, {extra})'
+
+
+class AnyHttpUrl(AnyUrl):
+    allowed_schemes = {'http', 'https'}
+
+    __slots__ = ()
+
+
+class HttpUrl(AnyHttpUrl):
+    tld_required = True
+    # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers
+    max_length = 2083
+    hidden_parts = {'port'}
+
+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {'port': '80' if parts['scheme'] == 'http' else '443'}
+
+
+class FileUrl(AnyUrl):
+    allowed_schemes = {'file'}
+    host_required = False
+
+    __slots__ = ()
+
+
+class MultiHostDsn(AnyUrl):
+    __slots__ = AnyUrl.__slots__ + ('hosts',)
+
+    def __init__(self, *args: Any, hosts: Optional[List['HostParts']] = None, **kwargs: Any):
+        super().__init__(*args, **kwargs)
+        self.hosts = hosts
+
+    @staticmethod
+    def _match_url(url: str) -> Optional[Match[str]]:
+        return multi_host_url_regex().match(url)
+
+    @classmethod
+    def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts':
+        return super().validate_parts(parts, validate_port=False)
+
+    @classmethod
+    def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'MultiHostDsn':
+        hosts_parts: List['HostParts'] = []
+        host_re = host_regex()
+        for host in m.groupdict()['hosts'].split(','):
+            d: Parts = host_re.match(host).groupdict()  # type: ignore
+            host, tld, host_type, rebuild = cls.validate_host(d)
+            port = d.get('port')
+            cls._validate_port(port)
+            hosts_parts.append(
+                {
+                    'host': host,
+                    'host_type': host_type,
+                    'tld': tld,
+                    'rebuild': rebuild,
+                    'port': port,
+                }
+            )
+
+        if len(hosts_parts) > 1:
+            return cls(
+                None if any([hp['rebuild'] for hp in hosts_parts]) else url,
+                scheme=parts['scheme'],
+                user=parts['user'],
+                password=parts['password'],
+                path=parts['path'],
+                query=parts['query'],
+                fragment=parts['fragment'],
+                host_type=None,
+                hosts=hosts_parts,
+            )
+        else:
+            # backwards compatibility with single host
+            host_part = hosts_parts[0]
+            return cls(
+                None if host_part['rebuild'] else url,
+                scheme=parts['scheme'],
+                user=parts['user'],
+                password=parts['password'],
+                host=host_part['host'],
+                tld=host_part['tld'],
+                host_type=host_part['host_type'],
+                port=host_part.get('port'),
+                path=parts['path'],
+                query=parts['query'],
+                fragment=parts['fragment'],
+            )
+
+
+class PostgresDsn(MultiHostDsn):
+    allowed_schemes = {
+        'postgres',
+        'postgresql',
+        'postgresql+asyncpg',
+        'postgresql+pg8000',
+        'postgresql+psycopg',
+        'postgresql+psycopg2',
+        'postgresql+psycopg2cffi',
+        'postgresql+py-postgresql',
+        'postgresql+pygresql',
+    }
+    user_required = True
+
+    __slots__ = ()
+
+
+class CockroachDsn(AnyUrl):
+    allowed_schemes = {
+        'cockroachdb',
+        'cockroachdb+psycopg2',
+        'cockroachdb+asyncpg',
+    }
+    user_required = True
+
+
+class AmqpDsn(AnyUrl):
+    allowed_schemes = {'amqp', 'amqps'}
+    host_required = False
+
+
+class RedisDsn(AnyUrl):
+    __slots__ = ()
+    allowed_schemes = {'redis', 'rediss'}
+    host_required = False
+
+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {
+            'domain': 'localhost' if not (parts['ipv4'] or parts['ipv6']) else '',
+            'port': '6379',
+            'path': '/0',
+        }
+
+
+class MongoDsn(AnyUrl):
+    allowed_schemes = {'mongodb'}
+
+    # TODO: "Parts" needs to be generalized for "Replica Set", "Sharded Cluster", and other MongoDB deployment modes
+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {
+            'port': '27017',
+        }
+
+
+class KafkaDsn(AnyUrl):
+    allowed_schemes = {'kafka'}
+
+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {
+            'domain': 'localhost',
+            'port': '9092',
+        }
+
+
+def stricturl(
+    *,
+    strip_whitespace: bool = True,
+    min_length: int = 1,
+    max_length: int = 2**16,
+    tld_required: bool = True,
+    host_required: bool = True,
+    allowed_schemes: Optional[Collection[str]] = None,
+) -> Type[AnyUrl]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        strip_whitespace=strip_whitespace,
+        min_length=min_length,
+        max_length=max_length,
+        tld_required=tld_required,
+        host_required=host_required,
+        allowed_schemes=allowed_schemes,
+    )
+    return type('UrlValue', (AnyUrl,), namespace)
+
+
+def import_email_validator() -> None:
+    global email_validator
+    try:
+        import email_validator
+    except ImportError as e:
+        raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e
+
+
+class EmailStr(str):
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        field_schema.update(type='string', format='email')
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        # included here and below so the error happens straight away
+        import_email_validator()
+
+        yield str_validator
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Union[str]) -> str:
+        return validate_email(value)[1]
+
+
+class NameEmail(Representation):
+    __slots__ = 'name', 'email'
+
+    def __init__(self, name: str, email: str):
+        self.name = name
+        self.email = email
+
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email)
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        field_schema.update(type='string', format='name-email')
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        import_email_validator()
+
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Any) -> 'NameEmail':
+        if value.__class__ == cls:
+            return value
+        value = str_validator(value)
+        return cls(*validate_email(value))
+
+    def __str__(self) -> str:
+        return f'{self.name} <{self.email}>'
+
+
+class IPvAnyAddress(_BaseAddress):
+    __slots__ = ()
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        field_schema.update(type='string', format='ipvanyaddress')
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]:
+        try:
+            return IPv4Address(value)
+        except ValueError:
+            pass
+
+        try:
+            return IPv6Address(value)
+        except ValueError:
+            raise errors.IPvAnyAddressError()
+
+
+class IPvAnyInterface(_BaseAddress):
+    __slots__ = ()
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        field_schema.update(type='string', format='ipvanyinterface')
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]:
+        try:
+            return IPv4Interface(value)
+        except ValueError:
+            pass
+
+        try:
+            return IPv6Interface(value)
+        except ValueError:
+            raise errors.IPvAnyInterfaceError()
+
+
+class IPvAnyNetwork(_BaseNetwork):  # type: ignore
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        field_schema.update(type='string', format='ipvanynetwork')
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]:
+        # Assume the IP network is defined with the default value for the ``strict`` argument.
+        # Define your own class if you want to specify network address check strictness.
+        try:
+            return IPv4Network(value)
+        except ValueError:
+            pass
+
+        try:
+            return IPv6Network(value)
+        except ValueError:
+            raise errors.IPvAnyNetworkError()
+
+
+pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *')
+MAX_EMAIL_LENGTH = 2048
+"""Maximum length for an email.
+A somewhat arbitrary but very generous number compared to what is allowed by most implementations.
+"""
+
+
+def validate_email(value: Union[str]) -> Tuple[str, str]:
+    """
+    Email address validation using https://pypi.org/project/email-validator/
+    Notes:
+    * raw IP address (literal) domain parts are not allowed.
+    * "John Doe <local_part@domain.com>" style "pretty" email addresses are processed.
+    * spaces are stripped from the beginning and end of addresses, but no error is raised.
+    """
+    if email_validator is None:
+        import_email_validator()
+
+    if len(value) > MAX_EMAIL_LENGTH:
+        raise errors.EmailError()
+
+    m = pretty_email_regex.fullmatch(value)
+    name: Union[str, None] = None
+    if m:
+        name, value = m.groups()
+    email = value.strip()
+    try:
+        parts = email_validator.validate_email(email, check_deliverability=False)
+    except email_validator.EmailNotValidError as e:
+        raise errors.EmailError from e
+
+    if hasattr(parts, 'normalized'):
+        # email-validator >= 2
+        email = parts.normalized
+        assert email is not None
+        name = name or parts.local_part
+        return name, email
+    else:
+        # email-validator >1, <2
+        at_index = email.index('@')
+        local_part = email[:at_index]  # RFC 5321, local part must be case-sensitive.
+        global_part = email[at_index:].lower()
+
+        return name or local_part, local_part + global_part
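
A minimal usage sketch for the URL and DSN types above; the model and values are illustrative:

    from pydantic.v1 import BaseModel, HttpUrl, PostgresDsn, ValidationError

    class Service(BaseModel):
        homepage: HttpUrl
        database: PostgresDsn

    svc = Service(
        homepage='https://example.com/docs',
        database='postgresql://user:secret@db.example.com:5432/app',
    )
    # Parsed components are exposed as attributes on the validated value.
    print(svc.homepage.host)   # 'example.com'
    print(svc.database.path)   # '/app'

    try:
        Service(homepage='no-scheme', database='postgresql://user@localhost/app')
    except ValidationError as exc:
        print(exc)  # homepage: invalid or missing URL scheme
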
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/parse.py b/.venv/lib/python3.12/site-packages/pydantic/v1/parse.py
new file mode 100644
index 00000000..431d75a6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/parse.py
@@ -0,0 +1,66 @@
+import json
+import pickle
+from enum import Enum
+from pathlib import Path
+from typing import Any, Callable, Union
+
+from pydantic.v1.types import StrBytes
+
+
+class Protocol(str, Enum):
+    json = 'json'
+    pickle = 'pickle'
+
+
+def load_str_bytes(
+    b: StrBytes,
+    *,
+    content_type: str = None,
+    encoding: str = 'utf8',
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+) -> Any:
+    if proto is None and content_type:
+        if content_type.endswith(('json', 'javascript')):
+            pass
+        elif allow_pickle and content_type.endswith('pickle'):
+            proto = Protocol.pickle
+        else:
+            raise TypeError(f'Unknown content-type: {content_type}')
+
+    proto = proto or Protocol.json
+
+    if proto == Protocol.json:
+        if isinstance(b, bytes):
+            b = b.decode(encoding)
+        return json_loads(b)
+    elif proto == Protocol.pickle:
+        if not allow_pickle:
+            raise RuntimeError('Trying to decode with pickle with allow_pickle=False')
+        bb = b if isinstance(b, bytes) else b.encode()
+        return pickle.loads(bb)
+    else:
+        raise TypeError(f'Unknown protocol: {proto}')
+
+
+def load_file(
+    path: Union[str, Path],
+    *,
+    content_type: str = None,
+    encoding: str = 'utf8',
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+) -> Any:
+    path = Path(path)
+    b = path.read_bytes()
+    if content_type is None:
+        if path.suffix in ('.js', '.json'):
+            proto = Protocol.json
+        elif path.suffix == '.pkl':
+            proto = Protocol.pickle
+
+    return load_str_bytes(
+        b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads
+    )
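
A minimal sketch of how these loaders behave; the inputs are illustrative:

    import pickle

    from pydantic.v1.parse import Protocol, load_str_bytes

    # JSON is the default protocol; bytes are decoded using `encoding` first.
    assert load_str_bytes('{"a": 1}') == {'a': 1}
    assert load_str_bytes(b'{"a": 1}', content_type='application/json') == {'a': 1}

    # Pickle must be opted into, because unpickling untrusted data can execute arbitrary code.
    blob = pickle.dumps({'a': 1})
    assert load_str_bytes(blob, proto=Protocol.pickle, allow_pickle=True) == {'a': 1}
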
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/py.typed b/.venv/lib/python3.12/site-packages/pydantic/v1/py.typed
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/py.typed
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/schema.py b/.venv/lib/python3.12/site-packages/pydantic/v1/schema.py
new file mode 100644
index 00000000..a91fe2cd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/schema.py
@@ -0,0 +1,1163 @@
+import re
+import warnings
+from collections import defaultdict
+from dataclasses import is_dataclass
+from datetime import date, datetime, time, timedelta
+from decimal import Decimal
+from enum import Enum
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from pathlib import Path
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    ForwardRef,
+    FrozenSet,
+    Generic,
+    Iterable,
+    List,
+    Optional,
+    Pattern,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
+from uuid import UUID
+
+from typing_extensions import Annotated, Literal
+
+from pydantic.v1.fields import (
+    MAPPING_LIKE_SHAPES,
+    SHAPE_DEQUE,
+    SHAPE_FROZENSET,
+    SHAPE_GENERIC,
+    SHAPE_ITERABLE,
+    SHAPE_LIST,
+    SHAPE_SEQUENCE,
+    SHAPE_SET,
+    SHAPE_SINGLETON,
+    SHAPE_TUPLE,
+    SHAPE_TUPLE_ELLIPSIS,
+    FieldInfo,
+    ModelField,
+)
+from pydantic.v1.json import pydantic_encoder
+from pydantic.v1.networks import AnyUrl, EmailStr
+from pydantic.v1.types import (
+    ConstrainedDecimal,
+    ConstrainedFloat,
+    ConstrainedFrozenSet,
+    ConstrainedInt,
+    ConstrainedList,
+    ConstrainedSet,
+    ConstrainedStr,
+    SecretBytes,
+    SecretStr,
+    StrictBytes,
+    StrictStr,
+    conbytes,
+    condecimal,
+    confloat,
+    confrozenset,
+    conint,
+    conlist,
+    conset,
+    constr,
+)
+from pydantic.v1.typing import (
+    all_literal_values,
+    get_args,
+    get_origin,
+    get_sub_types,
+    is_callable_type,
+    is_literal_type,
+    is_namedtuple,
+    is_none_type,
+    is_union,
+)
+from pydantic.v1.utils import ROOT_KEY, get_model, lenient_issubclass
+
+if TYPE_CHECKING:
+    from pydantic.v1.dataclasses import Dataclass
+    from pydantic.v1.main import BaseModel
+
+default_prefix = '#/definitions/'
+default_ref_template = '#/definitions/{model}'
+
+TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]]
+TypeModelSet = Set[TypeModelOrEnum]
+
+
+def _apply_modify_schema(
+    modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any]
+) -> None:
+    from inspect import signature
+
+    sig = signature(modify_schema)
+    args = set(sig.parameters.keys())
+    if 'field' in args or 'kwargs' in args:
+        modify_schema(field_schema, field=field)
+    else:
+        modify_schema(field_schema)
+
+
+def schema(
+    models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]],
+    *,
+    by_alias: bool = True,
+    title: Optional[str] = None,
+    description: Optional[str] = None,
+    ref_prefix: Optional[str] = None,
+    ref_template: str = default_ref_template,
+) -> Dict[str, Any]:
+    """
+    Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions``
+    top-level JSON key, including their sub-models.
+
+    :param models: a list of models to include in the generated JSON Schema
+    :param by_alias: generate the schemas using the aliases defined, if any
+    :param title: title for the generated schema that includes the definitions
+    :param description: description for the generated schema
+    :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``; if None, it will be set to the
+      default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere
+      else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the
+      top-level key ``definitions``, so you can extract them from there. But all the references will have the set
+      prefix.
+    :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful
+      for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For
+      a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``.
+    :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for
+      the models and sub-models passed in ``models``.
+    """
+    clean_models = [get_model(model) for model in models]
+    flat_models = get_flat_models_from_models(clean_models)
+    model_name_map = get_model_name_map(flat_models)
+    definitions = {}
+    output_schema: Dict[str, Any] = {}
+    if title:
+        output_schema['title'] = title
+    if description:
+        output_schema['description'] = description
+    for model in clean_models:
+        m_schema, m_definitions, m_nested_models = model_process_schema(
+            model,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+        )
+        definitions.update(m_definitions)
+        model_name = model_name_map[model]
+        definitions[model_name] = m_schema
+    if definitions:
+        output_schema['definitions'] = definitions
+    return output_schema
+
+
+def model_schema(
+    model: Union[Type['BaseModel'], Type['Dataclass']],
+    by_alias: bool = True,
+    ref_prefix: Optional[str] = None,
+    ref_template: str = default_ref_template,
+) -> Dict[str, Any]:
+    """
+    Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level
+    JSON key.
+
+    :param model: a Pydantic model (a class that inherits from BaseModel)
+    :param by_alias: generate the schemas using the aliases defined, if any
+    :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``; if None, it will be set to the
+      default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere
+      else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the
+      top-level key ``definitions``, so you can extract them from there. But all the references will have the set
+      prefix.
+    :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for
+      references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a
+      sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``.
+    :return: dict with the JSON Schema for the passed ``model``
+    """
+    model = get_model(model)
+    flat_models = get_flat_models_from_model(model)
+    model_name_map = get_model_name_map(flat_models)
+    model_name = model_name_map[model]
+    m_schema, m_definitions, nested_models = model_process_schema(
+        model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template
+    )
+    if model_name in nested_models:
+        # model_name is in Nested models, it has circular references
+        m_definitions[model_name] = m_schema
+        m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False)
+    if m_definitions:
+        m_schema.update({'definitions': m_definitions})
+    return m_schema
+
+
+def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]:
+    # If no title is explicitly set, we don't set title in the schema for enums.
+    # The behaviour is the same as `BaseModel` reference, where the default title
+    # is in the definitions part of the schema.
+    schema_: Dict[str, Any] = {}
+    if field.field_info.title or not lenient_issubclass(field.type_, Enum):
+        schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ')
+
+    if field.field_info.title:
+        schema_overrides = True
+
+    if field.field_info.description:
+        schema_['description'] = field.field_info.description
+        schema_overrides = True
+
+    if not field.required and field.default is not None and not is_callable_type(field.outer_type_):
+        schema_['default'] = encode_default(field.default)
+        schema_overrides = True
+
+    return schema_, schema_overrides
+
+
+def field_schema(
+    field: ModelField,
+    *,
+    by_alias: bool = True,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_prefix: Optional[str] = None,
+    ref_template: str = default_ref_template,
+    known_models: Optional[TypeModelSet] = None,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    """
+    Process a Pydantic field and return a tuple with a JSON Schema for it as the first item.
+    Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field
+    is a model and has sub-models, and those sub-models don't have overrides (such as ``title``, ``default``, etc.), they
+    will be included in the definitions and referenced in the schema instead of being included recursively.
+
+    :param field: a Pydantic ``ModelField``
+    :param by_alias: use the defined alias (if any) in the returned schema
+    :param model_name_map: used to generate the JSON Schema references to other models included in the definitions
+    :param ref_prefix: the JSON Pointer prefix to use for references to other schemas; if None, the default of
+      ``#/definitions/`` will be used
+    :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for
+      references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a
+      sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``.
+    :param known_models: used to solve circular references
+    :return: tuple of the schema for this field and additional definitions
+    """
+    s, schema_overrides = get_field_info_schema(field)
+
+    validation_schema = get_field_schema_validations(field)
+    if validation_schema:
+        s.update(validation_schema)
+        schema_overrides = True
+
+    f_schema, f_definitions, f_nested_models = field_type_schema(
+        field,
+        by_alias=by_alias,
+        model_name_map=model_name_map,
+        schema_overrides=schema_overrides,
+        ref_prefix=ref_prefix,
+        ref_template=ref_template,
+        known_models=known_models or set(),
+    )
+
+    # $ref will only be returned when there are no schema_overrides
+    if '$ref' in f_schema:
+        return f_schema, f_definitions, f_nested_models
+    else:
+        s.update(f_schema)
+        return s, f_definitions, f_nested_models
+
+
+numeric_types = (int, float, Decimal)
+_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = (
+    ('max_length', numeric_types, 'maxLength'),
+    ('min_length', numeric_types, 'minLength'),
+    ('regex', str, 'pattern'),
+)
+
+_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = (
+    ('gt', numeric_types, 'exclusiveMinimum'),
+    ('lt', numeric_types, 'exclusiveMaximum'),
+    ('ge', numeric_types, 'minimum'),
+    ('le', numeric_types, 'maximum'),
+    ('multiple_of', numeric_types, 'multipleOf'),
+)
+
+
+def get_field_schema_validations(field: ModelField) -> Dict[str, Any]:
+    """
+    Get the JSON Schema validation keywords for a ``field`` with an annotation of
+    a Pydantic ``FieldInfo`` with validation arguments.
+    """
+    f_schema: Dict[str, Any] = {}
+
+    if lenient_issubclass(field.type_, Enum):
+        # schema is already updated by `enum_process_schema`; just update with field extra
+        if field.field_info.extra:
+            f_schema.update(field.field_info.extra)
+        return f_schema
+
+    if lenient_issubclass(field.type_, (str, bytes)):
+        for attr_name, t, keyword in _str_types_attrs:
+            attr = getattr(field.field_info, attr_name, None)
+            if isinstance(attr, t):
+                f_schema[keyword] = attr
+    if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool):
+        for attr_name, t, keyword in _numeric_types_attrs:
+            attr = getattr(field.field_info, attr_name, None)
+            if isinstance(attr, t):
+                f_schema[keyword] = attr
+    if field.field_info is not None and field.field_info.const:
+        f_schema['const'] = field.default
+    if field.field_info.extra:
+        f_schema.update(field.field_info.extra)
+    modify_schema = getattr(field.outer_type_, '__modify_schema__', None)
+    if modify_schema:
+        _apply_modify_schema(modify_schema, field, f_schema)
+    return f_schema
+
+
+def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]:
+    """
+    Process a set of models and generate unique names for them to be used as keys in the JSON Schema
+    definitions. By default the names are the same as the class name. But if two models in different Python
+    modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be
+    based on the Python module path for those conflicting models to prevent name collisions.
+
+    :param unique_models: a Python set of models
+    :return: dict mapping models to names
+    """
+    name_model_map = {}
+    conflicting_names: Set[str] = set()
+    for model in unique_models:
+        model_name = normalize_name(model.__name__)
+        if model_name in conflicting_names:
+            model_name = get_long_model_name(model)
+            name_model_map[model_name] = model
+        elif model_name in name_model_map:
+            conflicting_names.add(model_name)
+            conflicting_model = name_model_map.pop(model_name)
+            name_model_map[get_long_model_name(conflicting_model)] = conflicting_model
+            name_model_map[get_long_model_name(model)] = model
+        else:
+            name_model_map[model_name] = model
+    return {v: k for k, v in name_model_map.items()}
+
+
+def get_flat_models_from_model(model: Type['BaseModel'], known_models: Optional[TypeModelSet] = None) -> TypeModelSet:
+    """
+    Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass
+    model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also
+    subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``),
+    the return value will be ``set([Foo, Bar, Baz])``.
+
+    :param model: a Pydantic ``BaseModel`` subclass
+    :param known_models: used to solve circular references
+    :return: a set with the initial model and all its sub-models
+    """
+    known_models = known_models or set()
+    flat_models: TypeModelSet = set()
+    flat_models.add(model)
+    known_models |= flat_models
+    fields = cast(Sequence[ModelField], model.__fields__.values())
+    flat_models |= get_flat_models_from_fields(fields, known_models=known_models)
+    return flat_models
+
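+
+# Editor's sketch (not part of the upstream module): how the flat-model helpers are typically
+# combined; ``Foo``/``Bar``/``Baz`` and the function name are hypothetical, for illustration only.
+def _example_flat_models() -> None:
+    from pydantic.v1 import BaseModel
+
+    class Baz(BaseModel):
+        z: int
+
+    class Bar(BaseModel):
+        baz: Baz
+
+    class Foo(BaseModel):
+        bar: Bar
+
+    flat = get_flat_models_from_model(Foo)
+    assert flat == {Foo, Bar, Baz}
+    # With no name collisions, each model simply maps to its own class name.
+    assert get_model_name_map(flat) == {Foo: 'Foo', Bar: 'Bar', Baz: 'Baz'}
+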
+
+def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet:
+    """
+    Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a subclass of BaseModel
+    (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree.
+    I.e. if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that
+    model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of
+    type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+
+    :param field: a Pydantic ``ModelField``
+    :param known_models: used to solve circular references
+    :return: a set with the model used in the declaration for this field, if any, and all its sub-models
+    """
+    from pydantic.v1.main import BaseModel
+
+    flat_models: TypeModelSet = set()
+
+    field_type = field.type_
+    if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
+        field_type = field_type.__pydantic_model__
+
+    if field.sub_fields and not lenient_issubclass(field_type, BaseModel):
+        flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models)
+    elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models:
+        flat_models |= get_flat_models_from_model(field_type, known_models=known_models)
+    elif lenient_issubclass(field_type, Enum):
+        flat_models.add(field_type)
+    return flat_models
+
+
+def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet:
+    """
+    Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel``
+    (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree.
+    I.e. if you pass the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and one of them has a
+    field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also
+    subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+
+    :param fields: a list of Pydantic ``ModelField``s
+    :param known_models: used to solve circular references
+    :return: a set with any model declared in the fields, and all their sub-models
+    """
+    flat_models: TypeModelSet = set()
+    for field in fields:
+        flat_models |= get_flat_models_from_field(field, known_models=known_models)
+    return flat_models
+
+
+def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet:
+    """
+    Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass
+    a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has
+    a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+    """
+    flat_models: TypeModelSet = set()
+    for model in models:
+        flat_models |= get_flat_models_from_model(model)
+    return flat_models
+
+
+def get_long_model_name(model: TypeModelOrEnum) -> str:
+    return f'{model.__module__}__{model.__qualname__}'.replace('.', '__')
+
+
+def field_type_schema(
+    field: ModelField,
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    schema_overrides: bool = False,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    """
+    Used by ``field_schema()``; you should probably be using that function instead.
+
+    Take a single ``field`` and generate the schema for its type only, not including additional
+    information such as the title. Also return additional schema definitions from sub-models.
+    """
+    from pydantic.v1.main import BaseModel  # noqa: F811
+
+    definitions = {}
+    nested_models: Set[str] = set()
+    f_schema: Dict[str, Any]
+    if field.shape in {
+        SHAPE_LIST,
+        SHAPE_TUPLE_ELLIPSIS,
+        SHAPE_SEQUENCE,
+        SHAPE_SET,
+        SHAPE_FROZENSET,
+        SHAPE_ITERABLE,
+        SHAPE_DEQUE,
+    }:
+        items_schema, f_definitions, f_nested_models = field_singleton_schema(
+            field,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+        definitions.update(f_definitions)
+        nested_models.update(f_nested_models)
+        f_schema = {'type': 'array', 'items': items_schema}
+        if field.shape in {SHAPE_SET, SHAPE_FROZENSET}:
+            f_schema['uniqueItems'] = True
+
+    elif field.shape in MAPPING_LIKE_SHAPES:
+        f_schema = {'type': 'object'}
+        key_field = cast(ModelField, field.key_field)
+        regex = getattr(key_field.type_, 'regex', None)
+        items_schema, f_definitions, f_nested_models = field_singleton_schema(
+            field,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+        definitions.update(f_definitions)
+        nested_models.update(f_nested_models)
+        if regex:
+            # Dict keys have a regex pattern
+            # items_schema might be a schema or empty dict, add it either way
+            f_schema['patternProperties'] = {ConstrainedStr._get_pattern(regex): items_schema}
+        if items_schema:
+            # The dict values are not simply Any, so they need a schema
+            f_schema['additionalProperties'] = items_schema
+    elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)):
+        sub_schema = []
+        sub_fields = cast(List[ModelField], field.sub_fields)
+        for sf in sub_fields:
+            sf_schema, sf_definitions, sf_nested_models = field_type_schema(
+                sf,
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+            )
+            definitions.update(sf_definitions)
+            nested_models.update(sf_nested_models)
+            sub_schema.append(sf_schema)
+
+        sub_fields_len = len(sub_fields)
+        if field.shape == SHAPE_GENERIC:
+            all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema}
+            f_schema = {'allOf': [all_of_schemas]}
+        else:
+            f_schema = {
+                'type': 'array',
+                'minItems': sub_fields_len,
+                'maxItems': sub_fields_len,
+            }
+            if sub_fields_len >= 1:
+                f_schema['items'] = sub_schema
+    else:
+        assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape
+        f_schema, f_definitions, f_nested_models = field_singleton_schema(
+            field,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            schema_overrides=schema_overrides,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+        definitions.update(f_definitions)
+        nested_models.update(f_nested_models)
+
+    # check field type to avoid repeated calls to the same __modify_schema__ method
+    if field.type_ != field.outer_type_:
+        if field.shape == SHAPE_GENERIC:
+            field_type = field.type_
+        else:
+            field_type = field.outer_type_
+        modify_schema = getattr(field_type, '__modify_schema__', None)
+        if modify_schema:
+            _apply_modify_schema(modify_schema, field, f_schema)
+    return f_schema, definitions, nested_models
+
+
+def model_process_schema(
+    model: TypeModelOrEnum,
+    *,
+    by_alias: bool = True,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_prefix: Optional[str] = None,
+    ref_template: str = default_ref_template,
+    known_models: Optional[TypeModelSet] = None,
+    field: Optional[ModelField] = None,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    """
+    Used by ``model_schema()``; you should probably be using that function instead.
+
+    Take a single ``model`` and generate its schema. Also return additional schema definitions from sub-models. The
+    sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All
+    the definitions are returned as the second value.
+    """
+    from inspect import getdoc, signature
+
+    known_models = known_models or set()
+    if lenient_issubclass(model, Enum):
+        model = cast(Type[Enum], model)
+        s = enum_process_schema(model, field=field)
+        return s, {}, set()
+    model = cast(Type['BaseModel'], model)
+    s = {'title': model.__config__.title or model.__name__}
+    doc = getdoc(model)
+    if doc:
+        s['description'] = doc
+    known_models.add(model)
+    m_schema, m_definitions, nested_models = model_type_schema(
+        model,
+        by_alias=by_alias,
+        model_name_map=model_name_map,
+        ref_prefix=ref_prefix,
+        ref_template=ref_template,
+        known_models=known_models,
+    )
+    s.update(m_schema)
+    schema_extra = model.__config__.schema_extra
+    if callable(schema_extra):
+        if len(signature(schema_extra).parameters) == 1:
+            schema_extra(s)
+        else:
+            schema_extra(s, model)
+    else:
+        s.update(schema_extra)
+    return s, m_definitions, nested_models
+
+
+def model_type_schema(
+    model: Type['BaseModel'],
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    """
+    You should probably be using ``model_schema()``; this function is used indirectly by that function.
+
+    Take a single ``model`` and generate the schema for its type only, not including additional
+    information such as the title. Also return additional schema definitions from sub-models.
+    """
+    properties = {}
+    required = []
+    definitions: Dict[str, Any] = {}
+    nested_models: Set[str] = set()
+    for k, f in model.__fields__.items():
+        try:
+            f_schema, f_definitions, f_nested_models = field_schema(
+                f,
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+            )
+        except SkipField as skip:
+            warnings.warn(skip.message, UserWarning)
+            continue
+        definitions.update(f_definitions)
+        nested_models.update(f_nested_models)
+        if by_alias:
+            properties[f.alias] = f_schema
+            if f.required:
+                required.append(f.alias)
+        else:
+            properties[k] = f_schema
+            if f.required:
+                required.append(k)
+    if ROOT_KEY in properties:
+        out_schema = properties[ROOT_KEY]
+        out_schema['title'] = model.__config__.title or model.__name__
+    else:
+        out_schema = {'type': 'object', 'properties': properties}
+        if required:
+            out_schema['required'] = required
+    if model.__config__.extra == 'forbid':
+        out_schema['additionalProperties'] = False
+    return out_schema, definitions, nested_models
+
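+
+# Editor's sketch (not part of the upstream module): the user-visible effect of the function
+# above via ``BaseModel.schema()``; the ``User`` model is hypothetical.
+def _example_model_type_schema() -> None:
+    from pydantic.v1 import BaseModel
+
+    class User(BaseModel):
+        id: int
+        name: str = 'Jane'
+
+        class Config:
+            extra = 'forbid'  # becomes ``additionalProperties: False`` (see above)
+
+    schema = User.schema()
+    assert schema['required'] == ['id']
+    assert schema['additionalProperties'] is False
+    assert schema['properties']['name']['default'] == 'Jane'
+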
+
+def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]:
+    """
+    Take a single `enum` and generate its schema.
+
+    This is similar to the `model_process_schema` function, but applies to ``Enum`` objects.
+    """
+    import inspect
+
+    schema_: Dict[str, Any] = {
+        'title': enum.__name__,
+        # Python assigns all enums a default docstring value of 'An enumeration', so
+        # all enums will have a description field even if not explicitly provided.
+        'description': inspect.cleandoc(enum.__doc__ or 'An enumeration.'),
+        # Add enum values and the enum field type to the schema.
+        'enum': [item.value for item in cast(Iterable[Enum], enum)],
+    }
+
+    add_field_type_to_schema(enum, schema_)
+
+    modify_schema = getattr(enum, '__modify_schema__', None)
+    if modify_schema:
+        _apply_modify_schema(modify_schema, field, schema_)
+
+    return schema_
+
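+
+# Editor's sketch (not part of the upstream module): ``enum_process_schema`` on a small,
+# made-up str-based enum.
+def _example_enum_schema() -> None:
+    from enum import Enum
+
+    class Color(str, Enum):
+        """Supported colours."""
+
+        RED = 'red'
+        BLUE = 'blue'
+
+    schema = enum_process_schema(Color)
+    assert schema['title'] == 'Color'
+    assert schema['description'] == 'Supported colours.'
+    assert schema['enum'] == ['red', 'blue']
+    # ``Color`` subclasses ``str``, so ``add_field_type_to_schema`` also sets the JSON type.
+    assert schema['type'] == 'string'
+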
+
+def field_singleton_sub_fields_schema(
+    field: ModelField,
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    schema_overrides: bool = False,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    """
+    This function is used indirectly by ``field_schema()``; you should probably be using that function instead.
+
+    Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their
+    schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``.
+    """
+    sub_fields = cast(List[ModelField], field.sub_fields)
+    definitions = {}
+    nested_models: Set[str] = set()
+    if len(sub_fields) == 1:
+        return field_type_schema(
+            sub_fields[0],
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            schema_overrides=schema_overrides,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+    else:
+        s: Dict[str, Any] = {}
+        # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object
+        field_has_discriminator: bool = field.discriminator_key is not None
+        if field_has_discriminator:
+            assert field.sub_fields_mapping is not None
+
+            discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {}
+
+            for discriminator_value, sub_field in field.sub_fields_mapping.items():
+                if isinstance(discriminator_value, Enum):
+                    discriminator_value = str(discriminator_value.value)
+                # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many
+                if is_union(get_origin(sub_field.type_)):
+                    sub_models = get_sub_types(sub_field.type_)
+                    discriminator_models_refs[discriminator_value] = {
+                        model_name_map[sub_model]: get_schema_ref(
+                            model_name_map[sub_model], ref_prefix, ref_template, False
+                        )
+                        for sub_model in sub_models
+                    }
+                else:
+                    sub_field_type = sub_field.type_
+                    if hasattr(sub_field_type, '__pydantic_model__'):
+                        sub_field_type = sub_field_type.__pydantic_model__
+
+                    discriminator_model_name = model_name_map[sub_field_type]
+                    discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False)
+                    discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref']
+
+            s['discriminator'] = {
+                'propertyName': field.discriminator_alias if by_alias else field.discriminator_key,
+                'mapping': discriminator_models_refs,
+            }
+
+        sub_field_schemas = []
+        for sf in sub_fields:
+            sub_schema, sub_definitions, sub_nested_models = field_type_schema(
+                sf,
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                schema_overrides=schema_overrides,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+            )
+            definitions.update(sub_definitions)
+            if schema_overrides and 'allOf' in sub_schema:
+                # if the sub_field is a referenced schema we only need the referenced
+                # object. Otherwise we will end up with several allOf inside anyOf/oneOf.
+                # See https://github.com/pydantic/pydantic/issues/1209
+                sub_schema = sub_schema['allOf'][0]
+
+            if sub_schema.keys() == {'discriminator', 'oneOf'}:
+                # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere
+                sub_schema.pop('discriminator')
+            sub_field_schemas.append(sub_schema)
+            nested_models.update(sub_nested_models)
+        s['oneOf' if field_has_discriminator else 'anyOf'] = sub_field_schemas
+        return s, definitions, nested_models
+
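+
+# Editor's sketch (not part of the upstream module): a plain (non-discriminated) union goes
+# through the function above and ends up as ``anyOf``; a discriminated union would instead get
+# ``oneOf`` plus a ``discriminator`` object. ``Payment`` is a hypothetical model.
+def _example_union_schema() -> None:
+    from typing import Union
+
+    from pydantic.v1 import BaseModel
+
+    class Payment(BaseModel):
+        amount: Union[int, str]
+
+    prop = Payment.schema()['properties']['amount']
+    assert prop['anyOf'] == [{'type': 'integer'}, {'type': 'string'}]
+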
+
+# Order is important, e.g. subclasses of str must go before str.
+# This is used only for standard library types; custom types should use __modify_schema__ instead.
+field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = (
+    (Path, {'type': 'string', 'format': 'path'}),
+    (datetime, {'type': 'string', 'format': 'date-time'}),
+    (date, {'type': 'string', 'format': 'date'}),
+    (time, {'type': 'string', 'format': 'time'}),
+    (timedelta, {'type': 'number', 'format': 'time-delta'}),
+    (IPv4Network, {'type': 'string', 'format': 'ipv4network'}),
+    (IPv6Network, {'type': 'string', 'format': 'ipv6network'}),
+    (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}),
+    (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}),
+    (IPv4Address, {'type': 'string', 'format': 'ipv4'}),
+    (IPv6Address, {'type': 'string', 'format': 'ipv6'}),
+    (Pattern, {'type': 'string', 'format': 'regex'}),
+    (str, {'type': 'string'}),
+    (bytes, {'type': 'string', 'format': 'binary'}),
+    (bool, {'type': 'boolean'}),
+    (int, {'type': 'integer'}),
+    (float, {'type': 'number'}),
+    (Decimal, {'type': 'number'}),
+    (UUID, {'type': 'string', 'format': 'uuid'}),
+    (dict, {'type': 'object'}),
+    (list, {'type': 'array', 'items': {}}),
+    (tuple, {'type': 'array', 'items': {}}),
+    (set, {'type': 'array', 'items': {}, 'uniqueItems': True}),
+    (frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}),
+)
+
+json_scheme = {'type': 'string', 'format': 'json-string'}
+
+
+def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None:
+    """
+    Update the given `schema` with the type-specific metadata for the given `field_type`.
+
+    This function looks through `field_class_to_schema` for a class that matches the given `field_type`,
+    and then modifies the given `schema` with the information from that type.
+    """
+    for type_, t_schema in field_class_to_schema:
+        # Fallback for `typing.Pattern` and `re.Pattern` as they are not a valid class
+        if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern:
+            schema_.update(t_schema)
+            break
+
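+
+# Editor's sketch (not part of the upstream module): ``add_field_type_to_schema`` picks the
+# first matching entry in ``field_class_to_schema``, which is why subclasses are listed first.
+def _example_add_field_type() -> None:
+    schema: Dict[str, Any] = {}
+    add_field_type_to_schema(UUID, schema)
+    assert schema == {'type': 'string', 'format': 'uuid'}
+
+    schema = {}
+    add_field_type_to_schema(bool, schema)
+    # ``bool`` precedes ``int`` in the table, so it maps to 'boolean' rather than 'integer'.
+    assert schema == {'type': 'boolean'}
+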
+
+def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]:
+    if ref_prefix:
+        schema_ref = {'$ref': ref_prefix + name}
+    else:
+        schema_ref = {'$ref': ref_template.format(model=name)}
+    return {'allOf': [schema_ref]} if schema_overrides else schema_ref
+
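+
+# Editor's sketch (not part of the upstream module): the two reference styles produced by
+# ``get_schema_ref``; ``default_ref_template`` is ``'#/definitions/{model}'`` in this module.
+def _example_schema_ref() -> None:
+    assert get_schema_ref('Foo', None, default_ref_template, False) == {'$ref': '#/definitions/Foo'}
+    # A legacy ``ref_prefix`` takes precedence over the template.
+    assert get_schema_ref('Foo', '#/components/schemas/', default_ref_template, False) == {
+        '$ref': '#/components/schemas/Foo'
+    }
+    # ``schema_overrides=True`` wraps the reference in ``allOf`` so sibling keywords survive.
+    assert get_schema_ref('Foo', None, default_ref_template, True) == {'allOf': [{'$ref': '#/definitions/Foo'}]}
+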
+
+def field_singleton_schema(  # noqa: C901 (ignore complexity)
+    field: ModelField,
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    schema_overrides: bool = False,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    """
+    This function is used indirectly by ``field_schema()``; you should probably be using that function instead.
+
+    Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models.
+    """
+    from pydantic.v1.main import BaseModel
+
+    definitions: Dict[str, Any] = {}
+    nested_models: Set[str] = set()
+    field_type = field.type_
+
+    # Recurse into this field if it contains sub_fields and is NOT a
+    # BaseModel OR that BaseModel is a const
+    if field.sub_fields and (
+        (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel)
+    ):
+        return field_singleton_sub_fields_schema(
+            field,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            schema_overrides=schema_overrides,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+    if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type:
+        return {}, definitions, nested_models  # no restrictions
+    if is_none_type(field_type):
+        return {'type': 'null'}, definitions, nested_models
+    if is_callable_type(field_type):
+        raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.')
+    f_schema: Dict[str, Any] = {}
+    if field.field_info is not None and field.field_info.const:
+        f_schema['const'] = field.default
+
+    if is_literal_type(field_type):
+        values = tuple(x.value if isinstance(x, Enum) else x for x in all_literal_values(field_type))
+
+        if len({v.__class__ for v in values}) > 1:
+            return field_schema(
+                multitypes_literal_field_for_schema(values, field),
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+            )
+
+        # All values have the same type
+        field_type = values[0].__class__
+        f_schema['enum'] = list(values)
+        add_field_type_to_schema(field_type, f_schema)
+    elif lenient_issubclass(field_type, Enum):
+        enum_name = model_name_map[field_type]
+        f_schema, schema_overrides = get_field_info_schema(field, schema_overrides)
+        f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides))
+        definitions[enum_name] = enum_process_schema(field_type, field=field)
+    elif is_namedtuple(field_type):
+        sub_schema, *_ = model_process_schema(
+            field_type.__pydantic_model__,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+            field=field,
+        )
+        items_schemas = list(sub_schema['properties'].values())
+        f_schema.update(
+            {
+                'type': 'array',
+                'items': items_schemas,
+                'minItems': len(items_schemas),
+                'maxItems': len(items_schemas),
+            }
+        )
+    elif not hasattr(field_type, '__pydantic_model__'):
+        add_field_type_to_schema(field_type, f_schema)
+
+        modify_schema = getattr(field_type, '__modify_schema__', None)
+        if modify_schema:
+            _apply_modify_schema(modify_schema, field, f_schema)
+
+    if f_schema:
+        return f_schema, definitions, nested_models
+
+    # Handle dataclass-based models
+    if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
+        field_type = field_type.__pydantic_model__
+
+    if issubclass(field_type, BaseModel):
+        model_name = model_name_map[field_type]
+        if field_type not in known_models:
+            sub_schema, sub_definitions, sub_nested_models = model_process_schema(
+                field_type,
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+                field=field,
+            )
+            definitions.update(sub_definitions)
+            definitions[model_name] = sub_schema
+            nested_models.update(sub_nested_models)
+        else:
+            nested_models.add(model_name)
+        schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, schema_overrides)
+        return schema_ref, definitions, nested_models
+
+    # For generics with no args
+    args = get_args(field_type)
+    if args is not None and not args and Generic in field_type.__bases__:
+        return f_schema, definitions, nested_models
+
+    raise ValueError(f'Value not declarable with JSON Schema, field: {field}')
+
+
+def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField:
+    """
+    To support `Literal` with values of different types, we split it into multiple `Literal`s, each holding values
+    of a single type, e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]`.
+    """
+    literal_distinct_types = defaultdict(list)
+    for v in values:
+        literal_distinct_types[v.__class__].append(v)
+    distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values())
+
+    return ModelField(
+        name=field.name,
+        type_=Union[tuple(distinct_literals)],  # type: ignore
+        class_validators=field.class_validators,
+        model_config=field.model_config,
+        default=field.default,
+        required=field.required,
+        alias=field.alias,
+        field_info=field.field_info,
+    )
+
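+
+# Editor's sketch (not part of the upstream module): the user-visible effect of the split above
+# for a mixed-type ``Literal``; the ``Event`` model is hypothetical.
+def _example_mixed_literal_schema() -> None:
+    from typing import Literal
+
+    from pydantic.v1 import BaseModel
+
+    class Event(BaseModel):
+        kind: Literal['a', 'b', 1]
+
+    prop = Event.schema()['properties']['kind']
+    # One enum per value type: the str values and the int values end up in separate sub-schemas.
+    assert [s['enum'] for s in prop['anyOf']] == [['a', 'b'], [1]]
+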
+
+def encode_default(dft: Any) -> Any:
+    from pydantic.v1.main import BaseModel
+
+    if isinstance(dft, BaseModel) or is_dataclass(dft):
+        dft = cast('dict[str, Any]', pydantic_encoder(dft))
+
+    if isinstance(dft, dict):
+        return {encode_default(k): encode_default(v) for k, v in dft.items()}
+    elif isinstance(dft, Enum):
+        return dft.value
+    elif isinstance(dft, (int, float, str)):
+        return dft
+    elif isinstance(dft, (list, tuple)):
+        t = dft.__class__
+        seq_args = (encode_default(v) for v in dft)
+        return t(*seq_args) if is_namedtuple(t) else t(seq_args)
+    elif dft is None:
+        return None
+    else:
+        return pydantic_encoder(dft)
+
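+
+# Editor's sketch (not part of the upstream module): ``encode_default`` turns default values
+# into JSON-friendly data for the schema; the enum here is made up for illustration.
+def _example_encode_default() -> None:
+    from decimal import Decimal
+    from enum import Enum
+
+    class Color(Enum):
+        RED = 'red'
+
+    assert encode_default(Color.RED) == 'red'
+    assert encode_default({'colors': (Color.RED,), 'limit': Decimal('1.5')}) == {'colors': ('red',), 'limit': 1.5}
+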
+
+_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal}
+
+
+def get_annotation_from_field_info(
+    annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False
+) -> Type[Any]:
+    """
+    Get an annotation with validation implemented for numbers and strings based on the field_info.
+    :param annotation: an annotation from a field specification, e.g. ``str`` or ``ConstrainedStr``
+    :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema
+    :param field_name: name of the field for use in error messages
+    :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment
+    :return: the same ``annotation`` if unmodified or a new annotation with validation in place
+    """
+    constraints = field_info.get_constraints()
+    used_constraints: Set[str] = set()
+    if constraints:
+        annotation, used_constraints = get_annotation_with_constraints(annotation, field_info)
+    if validate_assignment:
+        used_constraints.add('allow_mutation')
+
+    unused_constraints = constraints - used_constraints
+    if unused_constraints:
+        raise ValueError(
+            f'On field "{field_name}" the following field constraints are set but not enforced: '
+            f'{", ".join(unused_constraints)}. '
+            f'\nFor more details see https://docs.pydantic.dev/usage/schema/#unenforced-field-constraints'
+        )
+
+    return annotation
+
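+
+# Editor's sketch (not part of the upstream module): the ``ValueError`` above is what users see
+# when a constraint cannot apply to the annotated type, e.g. ``gt`` on a ``str`` field.
+def _example_unenforced_constraint() -> None:
+    from pydantic.v1 import BaseModel, Field
+
+    try:
+
+        class Broken(BaseModel):
+            name: str = Field(..., gt=0)  # 'gt' is never enforced for str
+
+    except ValueError as exc:
+        assert 'gt' in str(exc)
+    else:
+        raise AssertionError('expected a ValueError for the unenforced constraint')
+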
+
+def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]:  # noqa: C901
+    """
+    Get an annotation with used constraints implemented for numbers and strings based on the field_info.
+
+    :param annotation: an annotation from a field specification, e.g. ``str`` or ``ConstrainedStr``
+    :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema
+    :return: the same ``annotation`` if unmodified or a new annotation along with the used constraints.
+    """
+    used_constraints: Set[str] = set()
+
+    def go(type_: Any) -> Type[Any]:
+        if (
+            is_literal_type(type_)
+            or isinstance(type_, ForwardRef)
+            or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet))
+        ):
+            return type_
+        origin = get_origin(type_)
+        if origin is not None:
+            args: Tuple[Any, ...] = get_args(type_)
+            if any(isinstance(a, ForwardRef) for a in args):
+                # forward refs cause infinite recursion below
+                return type_
+
+            if origin is Annotated:
+                return go(args[0])
+            if is_union(origin):
+                return Union[tuple(go(a) for a in args)]  # type: ignore
+
+            if issubclass(origin, List) and (
+                field_info.min_items is not None
+                or field_info.max_items is not None
+                or field_info.unique_items is not None
+            ):
+                used_constraints.update({'min_items', 'max_items', 'unique_items'})
+                return conlist(
+                    go(args[0]),
+                    min_items=field_info.min_items,
+                    max_items=field_info.max_items,
+                    unique_items=field_info.unique_items,
+                )
+
+            if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None):
+                used_constraints.update({'min_items', 'max_items'})
+                return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items)
+
+            if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None):
+                used_constraints.update({'min_items', 'max_items'})
+                return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items)
+
+            for t in (Tuple, List, Set, FrozenSet, Sequence):
+                if issubclass(origin, t):  # type: ignore
+                    return t[tuple(go(a) for a in args)]  # type: ignore
+
+            if issubclass(origin, Dict):
+                return Dict[args[0], go(args[1])]  # type: ignore
+
+        attrs: Optional[Tuple[str, ...]] = None
+        constraint_func: Optional[Callable[..., type]] = None
+        if isinstance(type_, type):
+            if issubclass(type_, (SecretStr, SecretBytes)):
+                attrs = ('max_length', 'min_length')
+
+                def constraint_func(**kw: Any) -> Type[Any]:  # noqa: F811
+                    return type(type_.__name__, (type_,), kw)
+
+            elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)):
+                attrs = ('max_length', 'min_length', 'regex')
+                if issubclass(type_, StrictStr):
+
+                    def constraint_func(**kw: Any) -> Type[Any]:
+                        return type(type_.__name__, (type_,), kw)
+
+                else:
+                    constraint_func = constr
+            elif issubclass(type_, bytes):
+                attrs = ('max_length', 'min_length', 'regex')
+                if issubclass(type_, StrictBytes):
+
+                    def constraint_func(**kw: Any) -> Type[Any]:
+                        return type(type_.__name__, (type_,), kw)
+
+                else:
+                    constraint_func = conbytes
+            elif issubclass(type_, numeric_types) and not issubclass(
+                type_,
+                (
+                    ConstrainedInt,
+                    ConstrainedFloat,
+                    ConstrainedDecimal,
+                    ConstrainedList,
+                    ConstrainedSet,
+                    ConstrainedFrozenSet,
+                    bool,
+                ),
+            ):
+                # Is numeric type
+                attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of')
+                if issubclass(type_, float):
+                    attrs += ('allow_inf_nan',)
+                if issubclass(type_, Decimal):
+                    attrs += ('max_digits', 'decimal_places')
+                numeric_type = next(t for t in numeric_types if issubclass(type_, t))  # pragma: no branch
+                constraint_func = _map_types_constraint[numeric_type]
+
+        if attrs:
+            used_constraints.update(set(attrs))
+            kwargs = {
+                attr_name: attr
+                for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs)
+                if attr is not None
+            }
+            if kwargs:
+                constraint_func = cast(Callable[..., type], constraint_func)
+                return constraint_func(**kwargs)
+        return type_
+
+    return go(annotation), used_constraints
+
+
+def normalize_name(name: str) -> str:
+    """
+    Normalizes the given name. This can be applied to either a model *or* enum.
+    """
+    return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name)
+
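+
+# Editor's sketch (not part of the upstream module): names of parametrized generic models contain
+# characters that are invalid in definition keys, so they are normalized before use.
+def _example_normalize_name() -> None:
+    assert normalize_name('Response[List[int]]') == 'Response_List_int__'
+    assert normalize_name('my.module-Model_1') == 'my.module-Model_1'  # already valid, unchanged
+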
+
+class SkipField(Exception):
+    """
+    Utility exception used to exclude fields from schema.
+    """
+
+    def __init__(self, message: str) -> None:
+        self.message = message
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/tools.py b/.venv/lib/python3.12/site-packages/pydantic/v1/tools.py
new file mode 100644
index 00000000..6838a23e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/tools.py
@@ -0,0 +1,92 @@
+import json
+from functools import lru_cache
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable, Optional, Type, TypeVar, Union
+
+from pydantic.v1.parse import Protocol, load_file, load_str_bytes
+from pydantic.v1.types import StrBytes
+from pydantic.v1.typing import display_as_type
+
+__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of')
+
+NameFactory = Union[str, Callable[[Type[Any]], str]]
+
+if TYPE_CHECKING:
+    from pydantic.v1.typing import DictStrAny
+
+
+def _generate_parsing_type_name(type_: Any) -> str:
+    return f'ParsingModel[{display_as_type(type_)}]'
+
+
+@lru_cache(maxsize=2048)
+def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any:
+    from pydantic.v1.main import create_model
+
+    if type_name is None:
+        type_name = _generate_parsing_type_name
+    if not isinstance(type_name, str):
+        type_name = type_name(type_)
+    return create_model(type_name, __root__=(type_, ...))
+
+
+T = TypeVar('T')
+
+
+def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T:
+    model_type = _get_parsing_type(type_, type_name=type_name)  # type: ignore[arg-type]
+    return model_type(__root__=obj).__root__
+
+
+def parse_file_as(
+    type_: Type[T],
+    path: Union[str, Path],
+    *,
+    content_type: str = None,
+    encoding: str = 'utf8',
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+    type_name: Optional[NameFactory] = None,
+) -> T:
+    obj = load_file(
+        path,
+        proto=proto,
+        content_type=content_type,
+        encoding=encoding,
+        allow_pickle=allow_pickle,
+        json_loads=json_loads,
+    )
+    return parse_obj_as(type_, obj, type_name=type_name)
+
+
+def parse_raw_as(
+    type_: Type[T],
+    b: StrBytes,
+    *,
+    content_type: str = None,
+    encoding: str = 'utf8',
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+    type_name: Optional[NameFactory] = None,
+) -> T:
+    obj = load_str_bytes(
+        b,
+        proto=proto,
+        content_type=content_type,
+        encoding=encoding,
+        allow_pickle=allow_pickle,
+        json_loads=json_loads,
+    )
+    return parse_obj_as(type_, obj, type_name=type_name)
+
+
+def schema_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_kwargs: Any) -> 'DictStrAny':
+    """Generate a JSON schema (as dict) for the passed model or dynamically generated one"""
+    return _get_parsing_type(type_, type_name=title).schema(**schema_kwargs)
+
+
+def schema_json_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_json_kwargs: Any) -> str:
+    """Generate a JSON schema (as JSON) for the passed model or dynamically generated one"""
+    return _get_parsing_type(type_, type_name=title).schema_json(**schema_json_kwargs)
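+
+
+# Editor's sketch (not part of the upstream module): typical use of the helpers above.
+def _example_tools() -> None:
+    from typing import List
+
+    assert parse_obj_as(List[int], ['1', 2]) == [1, 2]
+    assert parse_raw_as(List[int], '[1, "2", 3]') == [1, 2, 3]
+    # schema_of() wraps the type in a throwaway root model and returns that model's JSON schema.
+    assert schema_of(List[int], title='IntList')['title'] == 'IntList'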
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/types.py b/.venv/lib/python3.12/site-packages/pydantic/v1/types.py
new file mode 100644
index 00000000..0cd789a4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/types.py
@@ -0,0 +1,1205 @@
+import abc
+import math
+import re
+import warnings
+from datetime import date
+from decimal import Decimal, InvalidOperation
+from enum import Enum
+from pathlib import Path
+from types import new_class
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Dict,
+    FrozenSet,
+    List,
+    Optional,
+    Pattern,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
+from uuid import UUID
+from weakref import WeakSet
+
+from pydantic.v1 import errors
+from pydantic.v1.datetime_parse import parse_date
+from pydantic.v1.utils import import_string, update_not_none
+from pydantic.v1.validators import (
+    bytes_validator,
+    constr_length_validator,
+    constr_lower,
+    constr_strip_whitespace,
+    constr_upper,
+    decimal_validator,
+    float_finite_validator,
+    float_validator,
+    frozenset_validator,
+    int_validator,
+    list_validator,
+    number_multiple_validator,
+    number_size_validator,
+    path_exists_validator,
+    path_validator,
+    set_validator,
+    str_validator,
+    strict_bytes_validator,
+    strict_float_validator,
+    strict_int_validator,
+    strict_str_validator,
+)
+
+__all__ = [
+    'NoneStr',
+    'NoneBytes',
+    'StrBytes',
+    'NoneStrBytes',
+    'StrictStr',
+    'ConstrainedBytes',
+    'conbytes',
+    'ConstrainedList',
+    'conlist',
+    'ConstrainedSet',
+    'conset',
+    'ConstrainedFrozenSet',
+    'confrozenset',
+    'ConstrainedStr',
+    'constr',
+    'PyObject',
+    'ConstrainedInt',
+    'conint',
+    'PositiveInt',
+    'NegativeInt',
+    'NonNegativeInt',
+    'NonPositiveInt',
+    'ConstrainedFloat',
+    'confloat',
+    'PositiveFloat',
+    'NegativeFloat',
+    'NonNegativeFloat',
+    'NonPositiveFloat',
+    'FiniteFloat',
+    'ConstrainedDecimal',
+    'condecimal',
+    'UUID1',
+    'UUID3',
+    'UUID4',
+    'UUID5',
+    'FilePath',
+    'DirectoryPath',
+    'Json',
+    'JsonWrapper',
+    'SecretField',
+    'SecretStr',
+    'SecretBytes',
+    'StrictBool',
+    'StrictBytes',
+    'StrictInt',
+    'StrictFloat',
+    'PaymentCardNumber',
+    'ByteSize',
+    'PastDate',
+    'FutureDate',
+    'ConstrainedDate',
+    'condate',
+]
+
+NoneStr = Optional[str]
+NoneBytes = Optional[bytes]
+StrBytes = Union[str, bytes]
+NoneStrBytes = Optional[StrBytes]
+OptionalInt = Optional[int]
+OptionalIntFloat = Union[OptionalInt, float]
+OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal]
+OptionalDate = Optional[date]
+StrIntFloat = Union[str, int, float]
+
+if TYPE_CHECKING:
+    from typing_extensions import Annotated
+
+    from pydantic.v1.dataclasses import Dataclass
+    from pydantic.v1.main import BaseModel
+    from pydantic.v1.typing import CallableGenerator
+
+    ModelOrDc = Type[Union[BaseModel, Dataclass]]
+
+T = TypeVar('T')
+_DEFINED_TYPES: 'WeakSet[type]' = WeakSet()
+
+
+@overload
+def _registered(typ: Type[T]) -> Type[T]:
+    pass
+
+
+@overload
+def _registered(typ: 'ConstrainedNumberMeta') -> 'ConstrainedNumberMeta':
+    pass
+
+
+def _registered(typ: Union[Type[T], 'ConstrainedNumberMeta']) -> Union[Type[T], 'ConstrainedNumberMeta']:
+    # In order to generate valid examples of constrained types, Hypothesis needs
+    # to inspect the type object - so we keep a weakref to each contype object
+    # until it can be registered.  When (or if) our Hypothesis plugin is loaded,
+    # it monkeypatches this function.
+    # If Hypothesis is never used, the total effect is to keep a weak reference
+    # which has minimal memory usage and doesn't even affect garbage collection.
+    _DEFINED_TYPES.add(typ)
+    return typ
+
+
+class ConstrainedNumberMeta(type):
+    def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt':  # type: ignore
+        new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct))
+
+        if new_cls.gt is not None and new_cls.ge is not None:
+            raise errors.ConfigError('bounds gt and ge cannot be specified at the same time')
+        if new_cls.lt is not None and new_cls.le is not None:
+            raise errors.ConfigError('bounds lt and le cannot be specified at the same time')
+
+        return _registered(new_cls)  # type: ignore
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+if TYPE_CHECKING:
+    StrictBool = bool
+else:
+
+    class StrictBool(int):
+        """
+        StrictBool to allow for bools which are not type-coerced.
+        """
+
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(type='boolean')
+
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Any) -> bool:
+            """
+            Ensure that we only allow bools.
+            """
+            if isinstance(value, bool):
+                return value
+
+            raise errors.StrictBoolError()
+
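+
+# Editor's sketch (not part of the upstream module): ``StrictBool`` accepts only real bools,
+# unlike plain ``bool`` fields, which coerce values such as ``1`` or ``'yes'``.
+def _example_strict_bool() -> None:
+    from pydantic.v1 import BaseModel, ValidationError
+
+    class Flag(BaseModel):
+        on: StrictBool
+
+    assert Flag(on=True).on is True
+    try:
+        Flag(on=1)
+    except ValidationError as exc:
+        assert 'value is not a valid boolean' in str(exc)
+    else:
+        raise AssertionError('expected a ValidationError for a non-bool value')
+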
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class ConstrainedInt(int, metaclass=ConstrainedNumberMeta):
+    strict: bool = False
+    gt: OptionalInt = None
+    ge: OptionalInt = None
+    lt: OptionalInt = None
+    le: OptionalInt = None
+    multiple_of: OptionalInt = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            exclusiveMinimum=cls.gt,
+            exclusiveMaximum=cls.lt,
+            minimum=cls.ge,
+            maximum=cls.le,
+            multipleOf=cls.multiple_of,
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield strict_int_validator if cls.strict else int_validator
+        yield number_size_validator
+        yield number_multiple_validator
+
+
+def conint(
+    *,
+    strict: bool = False,
+    gt: Optional[int] = None,
+    ge: Optional[int] = None,
+    lt: Optional[int] = None,
+    le: Optional[int] = None,
+    multiple_of: Optional[int] = None,
+) -> Type[int]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of)
+    return type('ConstrainedIntValue', (ConstrainedInt,), namespace)
+
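+
+# Editor's sketch (not part of the upstream module): ``conint`` builds a throwaway
+# ``ConstrainedInt`` subclass whose class attributes drive the validators yielded above.
+def _example_conint() -> None:
+    from pydantic.v1 import BaseModel, ValidationError
+
+    class Cart(BaseModel):
+        quantity: conint(gt=0, le=10, multiple_of=2)
+
+    assert Cart(quantity='4').quantity == 4  # non-strict, so the string is coerced to int
+    try:
+        Cart(quantity=3)
+    except ValidationError as exc:
+        assert 'multiple of 2' in str(exc)
+    else:
+        raise AssertionError('expected a ValidationError')
+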
+
+if TYPE_CHECKING:
+    PositiveInt = int
+    NegativeInt = int
+    NonPositiveInt = int
+    NonNegativeInt = int
+    StrictInt = int
+else:
+
+    class PositiveInt(ConstrainedInt):
+        gt = 0
+
+    class NegativeInt(ConstrainedInt):
+        lt = 0
+
+    class NonPositiveInt(ConstrainedInt):
+        le = 0
+
+    class NonNegativeInt(ConstrainedInt):
+        ge = 0
+
+    class StrictInt(ConstrainedInt):
+        strict = True
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta):
+    strict: bool = False
+    gt: OptionalIntFloat = None
+    ge: OptionalIntFloat = None
+    lt: OptionalIntFloat = None
+    le: OptionalIntFloat = None
+    multiple_of: OptionalIntFloat = None
+    allow_inf_nan: Optional[bool] = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            exclusiveMinimum=cls.gt,
+            exclusiveMaximum=cls.lt,
+            minimum=cls.ge,
+            maximum=cls.le,
+            multipleOf=cls.multiple_of,
+        )
+        # Modify constraints to account for differences between IEEE floats and JSON
+        if field_schema.get('exclusiveMinimum') == -math.inf:
+            del field_schema['exclusiveMinimum']
+        if field_schema.get('minimum') == -math.inf:
+            del field_schema['minimum']
+        if field_schema.get('exclusiveMaximum') == math.inf:
+            del field_schema['exclusiveMaximum']
+        if field_schema.get('maximum') == math.inf:
+            del field_schema['maximum']
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield strict_float_validator if cls.strict else float_validator
+        yield number_size_validator
+        yield number_multiple_validator
+        yield float_finite_validator
+
+
+def confloat(
+    *,
+    strict: bool = False,
+    gt: float = None,
+    ge: float = None,
+    lt: float = None,
+    le: float = None,
+    multiple_of: float = None,
+    allow_inf_nan: Optional[bool] = None,
+) -> Type[float]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan)
+    return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace)
+
+
+if TYPE_CHECKING:
+    PositiveFloat = float
+    NegativeFloat = float
+    NonPositiveFloat = float
+    NonNegativeFloat = float
+    StrictFloat = float
+    FiniteFloat = float
+else:
+
+    class PositiveFloat(ConstrainedFloat):
+        gt = 0
+
+    class NegativeFloat(ConstrainedFloat):
+        lt = 0
+
+    class NonPositiveFloat(ConstrainedFloat):
+        le = 0
+
+    class NonNegativeFloat(ConstrainedFloat):
+        ge = 0
+
+    class StrictFloat(ConstrainedFloat):
+        strict = True
+
+    class FiniteFloat(ConstrainedFloat):
+        allow_inf_nan = False
+
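+
+# Editor's sketch (not part of the upstream module): ``FiniteFloat`` sets ``allow_inf_nan = False``,
+# which ``float_finite_validator`` uses to reject infinities and NaN.
+def _example_finite_float() -> None:
+    from pydantic.v1 import BaseModel, ValidationError
+
+    class Measurement(BaseModel):
+        value: FiniteFloat
+
+    assert Measurement(value=3.5).value == 3.5
+    try:
+        Measurement(value=float('inf'))
+    except ValidationError:
+        pass
+    else:
+        raise AssertionError('expected a ValidationError for infinity')
+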
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class ConstrainedBytes(bytes):
+    strip_whitespace = False
+    to_upper = False
+    to_lower = False
+    min_length: OptionalInt = None
+    max_length: OptionalInt = None
+    strict: bool = False
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length)
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield strict_bytes_validator if cls.strict else bytes_validator
+        yield constr_strip_whitespace
+        yield constr_upper
+        yield constr_lower
+        yield constr_length_validator
+
+
+def conbytes(
+    *,
+    strip_whitespace: bool = False,
+    to_upper: bool = False,
+    to_lower: bool = False,
+    min_length: Optional[int] = None,
+    max_length: Optional[int] = None,
+    strict: bool = False,
+) -> Type[bytes]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        strip_whitespace=strip_whitespace,
+        to_upper=to_upper,
+        to_lower=to_lower,
+        min_length=min_length,
+        max_length=max_length,
+        strict=strict,
+    )
+    return _registered(type('ConstrainedBytesValue', (ConstrainedBytes,), namespace))
+
+
+if TYPE_CHECKING:
+    StrictBytes = bytes
+else:
+
+    class StrictBytes(ConstrainedBytes):
+        strict = True
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class ConstrainedStr(str):
+    strip_whitespace = False
+    to_upper = False
+    to_lower = False
+    min_length: OptionalInt = None
+    max_length: OptionalInt = None
+    curtail_length: OptionalInt = None
+    regex: Optional[Union[str, Pattern[str]]] = None
+    strict = False
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+            pattern=cls.regex and cls._get_pattern(cls.regex),
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield strict_str_validator if cls.strict else str_validator
+        yield constr_strip_whitespace
+        yield constr_upper
+        yield constr_lower
+        yield constr_length_validator
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Union[str]) -> Union[str]:
+        if cls.curtail_length and len(value) > cls.curtail_length:
+            value = value[: cls.curtail_length]
+
+        if cls.regex:
+            if not re.match(cls.regex, value):
+                raise errors.StrRegexError(pattern=cls._get_pattern(cls.regex))
+
+        return value
+
+    @staticmethod
+    def _get_pattern(regex: Union[str, Pattern[str]]) -> str:
+        return regex if isinstance(regex, str) else regex.pattern
+
+
+def constr(
+    *,
+    strip_whitespace: bool = False,
+    to_upper: bool = False,
+    to_lower: bool = False,
+    strict: bool = False,
+    min_length: Optional[int] = None,
+    max_length: Optional[int] = None,
+    curtail_length: Optional[int] = None,
+    regex: Optional[str] = None,
+) -> Type[str]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        strip_whitespace=strip_whitespace,
+        to_upper=to_upper,
+        to_lower=to_lower,
+        strict=strict,
+        min_length=min_length,
+        max_length=max_length,
+        curtail_length=curtail_length,
+        regex=regex and re.compile(regex),
+    )
+    return _registered(type('ConstrainedStrValue', (ConstrainedStr,), namespace))
+
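+
+# Editor's sketch (not part of the upstream module): ``constr`` options are applied in the order
+# the validators are yielded above (whitespace/case first, then length, then the regex check).
+def _example_constr() -> None:
+    from pydantic.v1 import BaseModel
+
+    Code = constr(strip_whitespace=True, to_upper=True, min_length=2, regex=r'^[A-Z]+$')
+
+    class Ticket(BaseModel):
+        code: Code
+
+    assert Ticket(code='  ab ').code == 'AB'
+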
+
+if TYPE_CHECKING:
+    StrictStr = str
+else:
+
+    class StrictStr(ConstrainedStr):
+        strict = True
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+# This type's superclass should be Set[T], but cython chokes on that...
+class ConstrainedSet(set):  # type: ignore
+    # Needed for pydantic to detect that this is a set
+    __origin__ = set
+    __args__: Set[Type[T]]  # type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.set_length_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
+
+    @classmethod
+    def set_length_validator(cls, v: 'Optional[Set[T]]') -> 'Optional[Set[T]]':
+        if v is None:
+            return None
+
+        v = set_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.SetMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.SetMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+
+def conset(item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None) -> Type[Set[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]}
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedSetValue', (ConstrainedSet,), {}, lambda ns: ns.update(namespace))
+
+
+# This type's superclass should be FrozenSet[T], but cython chokes on that...
+class ConstrainedFrozenSet(frozenset):  # type: ignore
+    # Needed for pydantic to detect that this is a set
+    __origin__ = frozenset
+    __args__: FrozenSet[Type[T]]  # type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.frozenset_length_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
+
+    @classmethod
+    def frozenset_length_validator(cls, v: 'Optional[FrozenSet[T]]') -> 'Optional[FrozenSet[T]]':
+        if v is None:
+            return None
+
+        v = frozenset_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.FrozenSetMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+
+def confrozenset(
+    item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None
+) -> Type[FrozenSet[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]}
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedFrozenSetValue', (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+# This type's superclass should be List[T], but cython chokes on that...
+class ConstrainedList(list):  # type: ignore
+    # Needed for pydantic to detect that this is a list
+    __origin__ = list
+    __args__: Tuple[Type[T], ...]  # type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    unique_items: Optional[bool] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.list_length_validator
+        if cls.unique_items:
+            yield cls.unique_items_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items)
+
+    @classmethod
+    def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]':
+        if v is None:
+            return None
+
+        v = list_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.ListMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.ListMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+    @classmethod
+    def unique_items_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]':
+        if v is None:
+            return None
+
+        for i, value in enumerate(v, start=1):
+            if value in v[i:]:
+                raise errors.ListUniqueItemsError()
+
+        return v
+
+
+def conlist(
+    item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None, unique_items: bool = None
+) -> Type[List[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = dict(
+        min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,)
+    )
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace))
+
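+# Illustrative usage of the constrained-collection factories above (a sketch, not part of
+# the upstream module; `Basket` is a hypothetical model and the import path assumes the
+# vendored `pydantic.v1` package):
+#
+#     from pydantic.v1 import BaseModel
+#
+#     class Basket(BaseModel):
+#         items: conlist(str, min_items=1, max_items=3, unique_items=True)
+#
+#     Basket(items=['apple', 'pear'])   # ok
+#     Basket(items=[])                  # ValidationError: too few items
+#     Basket(items=['a', 'a'])          # ValidationError: the list has duplicated items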
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+if TYPE_CHECKING:
+    PyObject = Callable[..., Any]
+else:
+
+    class PyObject:
+        validate_always = True
+
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Any) -> Any:
+            if isinstance(value, Callable):
+                return value
+
+            try:
+                value = str_validator(value)
+            except errors.StrError:
+                raise errors.PyObjectError(error_message='value is neither a valid import path nor a valid callable')
+
+            try:
+                return import_string(value)
+            except ImportError as e:
+                raise errors.PyObjectError(error_message=str(e))
+
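+# Illustrative behaviour of `PyObject` (a sketch, not upstream code; `Settings` is a
+# hypothetical model): a dotted import path is resolved via `import_string`, while an
+# already-callable value is passed through unchanged.
+#
+#     from pydantic.v1 import BaseModel
+#
+#     class Settings(BaseModel):
+#         loads: PyObject
+#
+#     Settings(loads='json.loads').loads('{"a": 1}')   # -> {'a': 1}
+#     Settings(loads=len).loads                        # -> <built-in function len>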
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta):
+    gt: OptionalIntFloatDecimal = None
+    ge: OptionalIntFloatDecimal = None
+    lt: OptionalIntFloatDecimal = None
+    le: OptionalIntFloatDecimal = None
+    max_digits: OptionalInt = None
+    decimal_places: OptionalInt = None
+    multiple_of: OptionalIntFloatDecimal = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            exclusiveMinimum=cls.gt,
+            exclusiveMaximum=cls.lt,
+            minimum=cls.ge,
+            maximum=cls.le,
+            multipleOf=cls.multiple_of,
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield decimal_validator
+        yield number_size_validator
+        yield number_multiple_validator
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Decimal) -> Decimal:
+        try:
+            normalized_value = value.normalize()
+        except InvalidOperation:
+            normalized_value = value
+        digit_tuple, exponent = normalized_value.as_tuple()[1:]
+        if exponent in {'F', 'n', 'N'}:
+            raise errors.DecimalIsNotFiniteError()
+
+        if exponent >= 0:
+            # A positive exponent adds that many trailing zeros.
+            digits = len(digit_tuple) + exponent
+            decimals = 0
+        else:
+            # If the absolute value of the negative exponent is larger than the
+            # number of digits, then it's the same as the number of digits,
+            # because it'll consume all of the digits in digit_tuple and then
+            # add abs(exponent) - len(digit_tuple) leading zeros after the
+            # decimal point.
+            if abs(exponent) > len(digit_tuple):
+                digits = decimals = abs(exponent)
+            else:
+                digits = len(digit_tuple)
+                decimals = abs(exponent)
+        whole_digits = digits - decimals
+
+        if cls.max_digits is not None and digits > cls.max_digits:
+            raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits)
+
+        if cls.decimal_places is not None and decimals > cls.decimal_places:
+            raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places)
+
+        if cls.max_digits is not None and cls.decimal_places is not None:
+            expected = cls.max_digits - cls.decimal_places
+            if whole_digits > expected:
+                raise errors.DecimalWholeDigitsError(whole_digits=expected)
+
+        return value
+
+
+def condecimal(
+    *,
+    gt: Decimal = None,
+    ge: Decimal = None,
+    lt: Decimal = None,
+    le: Decimal = None,
+    max_digits: Optional[int] = None,
+    decimal_places: Optional[int] = None,
+    multiple_of: Decimal = None,
+) -> Type[Decimal]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of
+    )
+    return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace)
+
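+# Worked example of the digit bookkeeping in `ConstrainedDecimal.validate` (a note, not
+# upstream code): Decimal('123.45').as_tuple() == (0, (1, 2, 3, 4, 5), -2), giving
+# digits=5, decimals=2 and whole_digits=3, so condecimal(max_digits=5, decimal_places=2)
+# accepts it, while Decimal('12.345') (decimals=3) fails with DecimalMaxPlacesError.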
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+if TYPE_CHECKING:
+    UUID1 = UUID
+    UUID3 = UUID
+    UUID4 = UUID
+    UUID5 = UUID
+else:
+
+    class UUID1(UUID):
+        _required_version = 1
+
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(type='string', format=f'uuid{cls._required_version}')
+
+    class UUID3(UUID1):
+        _required_version = 3
+
+    class UUID4(UUID1):
+        _required_version = 4
+
+    class UUID5(UUID1):
+        _required_version = 5
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+if TYPE_CHECKING:
+    FilePath = Path
+    DirectoryPath = Path
+else:
+
+    class FilePath(Path):
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(format='file-path')
+
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield path_validator
+            yield path_exists_validator
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Path) -> Path:
+            if not value.is_file():
+                raise errors.PathNotAFileError(path=value)
+
+            return value
+
+    class DirectoryPath(Path):
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(format='directory-path')
+
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield path_validator
+            yield path_exists_validator
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Path) -> Path:
+            if not value.is_dir():
+                raise errors.PathNotADirectoryError(path=value)
+
+            return value
+
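+# Illustrative usage (a sketch, not upstream code; the field names are hypothetical):
+# both types run the generic path validators first and then only check what kind of
+# filesystem object the existing path points at.
+#
+#     class Paths(BaseModel):
+#         work_dir: DirectoryPath
+#         config_file: FilePath
+#
+#     Paths(work_dir='/tmp', config_file='/tmp/app.cfg')  # each path must exist and be the right kind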
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class JsonWrapper:
+    pass
+
+
+class JsonMeta(type):
+    def __getitem__(self, t: Type[Any]) -> Type[JsonWrapper]:
+        if t is Any:
+            return Json  # allow Json[Any] to replicate plain Json
+        return _registered(type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t}))
+
+
+if TYPE_CHECKING:
+    Json = Annotated[T, ...]  # Json[list[str]] will be recognized by type checkers as list[str]
+
+else:
+
+    class Json(metaclass=JsonMeta):
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(type='string', format='json-string')
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class SecretField(abc.ABC):
+    """
+    Note: this should be implemented as a generic like `SecretField(ABC, Generic[T])`,
+          the `__init__()` should be part of the abstract class and the
+          `get_secret_value()` method should use the generic `T` type.
+
+          However, Cython doesn't support generics very well at the moment and
+          the generated code fails to be imported (see
+          https://github.com/cython/cython/issues/2753).
+    """
+
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value()
+
+    def __str__(self) -> str:
+        return '**********' if self.get_secret_value() else ''
+
+    def __hash__(self) -> int:
+        return hash(self.get_secret_value())
+
+    @abc.abstractmethod
+    def get_secret_value(self) -> Any:  # pragma: no cover
+        ...
+
+
+class SecretStr(SecretField):
+    min_length: OptionalInt = None
+    max_length: OptionalInt = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            type='string',
+            writeOnly=True,
+            format='password',
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.validate
+        yield constr_length_validator
+
+    @classmethod
+    def validate(cls, value: Any) -> 'SecretStr':
+        if isinstance(value, cls):
+            return value
+        value = str_validator(value)
+        return cls(value)
+
+    def __init__(self, value: str):
+        self._secret_value = value
+
+    def __repr__(self) -> str:
+        return f"SecretStr('{self}')"
+
+    def __len__(self) -> int:
+        return len(self._secret_value)
+
+    def display(self) -> str:
+        warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning)
+        return str(self)
+
+    def get_secret_value(self) -> str:
+        return self._secret_value
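+    # Illustrative behaviour (a note, not upstream code):
+    #
+    #     s = SecretStr('hunter2')
+    #     str(s), repr(s)          # ('**********', "SecretStr('**********')")
+    #     s.get_secret_value()     # 'hunter2'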
+
+
+class SecretBytes(SecretField):
+    min_length: OptionalInt = None
+    max_length: OptionalInt = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            type='string',
+            writeOnly=True,
+            format='password',
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.validate
+        yield constr_length_validator
+
+    @classmethod
+    def validate(cls, value: Any) -> 'SecretBytes':
+        if isinstance(value, cls):
+            return value
+        value = bytes_validator(value)
+        return cls(value)
+
+    def __init__(self, value: bytes):
+        self._secret_value = value
+
+    def __repr__(self) -> str:
+        return f"SecretBytes(b'{self}')"
+
+    def __len__(self) -> int:
+        return len(self._secret_value)
+
+    def display(self) -> str:
+        warnings.warn('`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead', DeprecationWarning)
+        return str(self)
+
+    def get_secret_value(self) -> bytes:
+        return self._secret_value
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class PaymentCardBrand(str, Enum):
+    # If you add another card type, please also add it to the
+    # Hypothesis strategy in `pydantic._hypothesis_plugin`.
+    amex = 'American Express'
+    mastercard = 'Mastercard'
+    visa = 'Visa'
+    other = 'other'
+
+    def __str__(self) -> str:
+        return self.value
+
+
+class PaymentCardNumber(str):
+    """
+    Based on: https://en.wikipedia.org/wiki/Payment_card_number
+    """
+
+    strip_whitespace: ClassVar[bool] = True
+    min_length: ClassVar[int] = 12
+    max_length: ClassVar[int] = 19
+    bin: str
+    last4: str
+    brand: PaymentCardBrand
+
+    def __init__(self, card_number: str):
+        self.bin = card_number[:6]
+        self.last4 = card_number[-4:]
+        self.brand = self._get_brand(card_number)
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield str_validator
+        yield constr_strip_whitespace
+        yield constr_length_validator
+        yield cls.validate_digits
+        yield cls.validate_luhn_check_digit
+        yield cls
+        yield cls.validate_length_for_brand
+
+    @property
+    def masked(self) -> str:
+        num_masked = len(self) - 10  # len(bin) + len(last4) == 10
+        return f'{self.bin}{"*" * num_masked}{self.last4}'
+
+    @classmethod
+    def validate_digits(cls, card_number: str) -> str:
+        if not card_number.isdigit():
+            raise errors.NotDigitError
+        return card_number
+
+    @classmethod
+    def validate_luhn_check_digit(cls, card_number: str) -> str:
+        """
+        Based on: https://en.wikipedia.org/wiki/Luhn_algorithm
+        """
+        sum_ = int(card_number[-1])
+        length = len(card_number)
+        parity = length % 2
+        for i in range(length - 1):
+            digit = int(card_number[i])
+            if i % 2 == parity:
+                digit *= 2
+            if digit > 9:
+                digit -= 9
+            sum_ += digit
+        valid = sum_ % 10 == 0
+        if not valid:
+            raise errors.LuhnValidationError
+        return card_number
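+    # Worked example of the check above (a note, not upstream code): for
+    # '4242424242424242' the check digit is 2 and parity == 0; each of the eight
+    # even-indexed 4s doubles to 8 (64 in total) and the seven odd-indexed 2s add 14,
+    # so sum_ == 80, 80 % 10 == 0 and the number passes the Luhn check.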
+
+    @classmethod
+    def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber':
+        """
+        Validate length based on BIN for major brands:
+        https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN)
+        """
+        required_length: Union[None, int, str] = None
+        if card_number.brand == PaymentCardBrand.mastercard:
+            required_length = 16
+            valid = len(card_number) == required_length
+        elif card_number.brand == PaymentCardBrand.visa:
+            required_length = '13, 16 or 19'
+            valid = len(card_number) in {13, 16, 19}
+        elif card_number.brand == PaymentCardBrand.amex:
+            required_length = 15
+            valid = len(card_number) == required_length
+        else:
+            valid = True
+        if not valid:
+            raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length)
+        return card_number
+
+    @staticmethod
+    def _get_brand(card_number: str) -> PaymentCardBrand:
+        if card_number[0] == '4':
+            brand = PaymentCardBrand.visa
+        elif 51 <= int(card_number[:2]) <= 55:
+            brand = PaymentCardBrand.mastercard
+        elif card_number[:2] in {'34', '37'}:
+            brand = PaymentCardBrand.amex
+        else:
+            brand = PaymentCardBrand.other
+        return brand
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+BYTE_SIZES = {
+    'b': 1,
+    'kb': 10**3,
+    'mb': 10**6,
+    'gb': 10**9,
+    'tb': 10**12,
+    'pb': 10**15,
+    'eb': 10**18,
+    'kib': 2**10,
+    'mib': 2**20,
+    'gib': 2**30,
+    'tib': 2**40,
+    'pib': 2**50,
+    'eib': 2**60,
+}
+BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k})
+byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE)
+
+
+class ByteSize(int):
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, v: StrIntFloat) -> 'ByteSize':
+        try:
+            return cls(int(v))
+        except ValueError:
+            pass
+
+        str_match = byte_string_re.match(str(v))
+        if str_match is None:
+            raise errors.InvalidByteSize()
+
+        scalar, unit = str_match.groups()
+        if unit is None:
+            unit = 'b'
+
+        try:
+            unit_mult = BYTE_SIZES[unit.lower()]
+        except KeyError:
+            raise errors.InvalidByteSizeUnit(unit=unit)
+
+        return cls(int(float(scalar) * unit_mult))
+
+    def human_readable(self, decimal: bool = False) -> str:
+        if decimal:
+            divisor = 1000
+            units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
+            final_unit = 'EB'
+        else:
+            divisor = 1024
+            units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB']
+            final_unit = 'EiB'
+
+        num = float(self)
+        for unit in units:
+            if abs(num) < divisor:
+                return f'{num:0.1f}{unit}'
+            num /= divisor
+
+        return f'{num:0.1f}{final_unit}'
+
+    def to(self, unit: str) -> float:
+        try:
+            unit_div = BYTE_SIZES[unit.lower()]
+        except KeyError:
+            raise errors.InvalidByteSizeUnit(unit=unit)
+
+        return self / unit_div
+
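+# Illustrative behaviour (a note, not upstream code):
+#
+#     ByteSize.validate('1.5 KiB')              # ByteSize(1536)
+#     ByteSize(5368709120).human_readable()     # '5.0GiB'
+#     ByteSize(5368709120).to('MB')             # 5368.70912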
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+if TYPE_CHECKING:
+    PastDate = date
+    FutureDate = date
+else:
+
+    class PastDate(date):
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield parse_date
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: date) -> date:
+            if value >= date.today():
+                raise errors.DateNotInThePastError()
+
+            return value
+
+    class FutureDate(date):
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield parse_date
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: date) -> date:
+            if value <= date.today():
+                raise errors.DateNotInTheFutureError()
+
+            return value
+
+
+class ConstrainedDate(date, metaclass=ConstrainedNumberMeta):
+    gt: OptionalDate = None
+    ge: OptionalDate = None
+    lt: OptionalDate = None
+    le: OptionalDate = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le)
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield parse_date
+        yield number_size_validator
+
+
+def condate(
+    *,
+    gt: date = None,
+    ge: date = None,
+    lt: date = None,
+    le: date = None,
+) -> Type[date]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(gt=gt, ge=ge, lt=lt, le=le)
+    return type('ConstrainedDateValue', (ConstrainedDate,), namespace)
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/typing.py b/.venv/lib/python3.12/site-packages/pydantic/v1/typing.py
new file mode 100644
index 00000000..7dd341ce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/typing.py
@@ -0,0 +1,608 @@
+import sys
+import typing
+from collections.abc import Callable
+from os import PathLike
+from typing import (  # type: ignore
+    TYPE_CHECKING,
+    AbstractSet,
+    Any,
+    Callable as TypingCallable,
+    ClassVar,
+    Dict,
+    ForwardRef,
+    Generator,
+    Iterable,
+    List,
+    Mapping,
+    NewType,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    _eval_type,
+    cast,
+    get_type_hints,
+)
+
+from typing_extensions import (
+    Annotated,
+    Final,
+    Literal,
+    NotRequired as TypedDictNotRequired,
+    Required as TypedDictRequired,
+)
+
+try:
+    from typing import _TypingBase as typing_base  # type: ignore
+except ImportError:
+    from typing import _Final as typing_base  # type: ignore
+
+try:
+    from typing import GenericAlias as TypingGenericAlias  # type: ignore
+except ImportError:
+    # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on)
+    TypingGenericAlias = ()
+
+try:
+    from types import UnionType as TypesUnionType  # type: ignore
+except ImportError:
+    # python < 3.10 does not have UnionType (str | int, byte | bool and so on)
+    TypesUnionType = ()
+
+
+if sys.version_info < (3, 9):
+
+    def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
+        return type_._evaluate(globalns, localns)
+
+else:
+
+    def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
+        # Even though it is the right signature for python 3.9, mypy complains with
+        # `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast...
+        # Python 3.13/3.12.4+ made `recursive_guard` a kwarg, so name it explicitly to avoid:
+        # TypeError: ForwardRef._evaluate() missing 1 required keyword-only argument: 'recursive_guard'
+        return cast(Any, type_)._evaluate(globalns, localns, recursive_guard=set())
+
+
+if sys.version_info < (3, 9):
+    # Ensure we always get the whole `Annotated` hint, not just the annotated type.
+    # For 3.7 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`,
+    # so it already returns the full annotation
+    get_all_type_hints = get_type_hints
+
+else:
+
+    def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any:
+        return get_type_hints(obj, globalns, localns, include_extras=True)
+
+
+_T = TypeVar('_T')
+
+AnyCallable = TypingCallable[..., Any]
+NoArgAnyCallable = TypingCallable[[], Any]
+
+# workaround for https://github.com/python/mypy/issues/9496
+AnyArgTCallable = TypingCallable[..., _T]
+
+
+# Annotated[...] is implemented by returning an instance of one of these classes, depending on
+# python/typing_extensions version.
+AnnotatedTypeNames = {'AnnotatedMeta', '_AnnotatedAlias'}
+
+
+LITERAL_TYPES: Set[Any] = {Literal}
+if hasattr(typing, 'Literal'):
+    LITERAL_TYPES.add(typing.Literal)
+
+
+if sys.version_info < (3, 8):
+
+    def get_origin(t: Type[Any]) -> Optional[Type[Any]]:
+        if type(t).__name__ in AnnotatedTypeNames:
+            # weirdly this is a runtime requirement, as well as for mypy
+            return cast(Type[Any], Annotated)
+        return getattr(t, '__origin__', None)
+
+else:
+    from typing import get_origin as _typing_get_origin
+
+    def get_origin(tp: Type[Any]) -> Optional[Type[Any]]:
+        """
+        We can't directly use `typing.get_origin` since we need a fallback to support
+        custom generic classes like `ConstrainedList`
+        It should be useless once https://github.com/cython/cython/issues/3537 is
+        solved and https://github.com/pydantic/pydantic/pull/1753 is merged.
+        """
+        if type(tp).__name__ in AnnotatedTypeNames:
+            return cast(Type[Any], Annotated)  # mypy complains about _SpecialForm
+        return _typing_get_origin(tp) or getattr(tp, '__origin__', None)
+
+
+if sys.version_info < (3, 8):
+    from typing import _GenericAlias
+
+    def get_args(t: Type[Any]) -> Tuple[Any, ...]:
+        """Compatibility version of get_args for python 3.7.
+
+        Mostly compatible with the python 3.8 `typing` module version
+        and able to handle almost all use cases.
+        """
+        if type(t).__name__ in AnnotatedTypeNames:
+            return t.__args__ + t.__metadata__
+        if isinstance(t, _GenericAlias):
+            res = t.__args__
+            if t.__origin__ is Callable and res and res[0] is not Ellipsis:
+                res = (list(res[:-1]), res[-1])
+            return res
+        return getattr(t, '__args__', ())
+
+else:
+    from typing import get_args as _typing_get_args
+
+    def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]:
+        """
+        In python 3.9, `typing.Dict`, `typing.List`, ...
+        do have an empty `__args__` by default (instead of the generic ~T for example).
+        In order to still support `Dict` for example and consider it as `Dict[Any, Any]`,
+        we retrieve the `_nparams` value that tells us how many parameters it needs.
+        """
+        if hasattr(tp, '_nparams'):
+            return (Any,) * tp._nparams
+        # Special case for `tuple[()]`, which used to return ((),) with `typing.Tuple`
+        # in python 3.10- but now returns () for `tuple` and `Tuple`.
+        # This will probably be clarified in pydantic v2
+        try:
+            if tp == Tuple[()] or sys.version_info >= (3, 9) and tp == tuple[()]:  # type: ignore[misc]
+                return ((),)
+        # there is a TypeError when compiled with cython
+        except TypeError:  # pragma: no cover
+            pass
+        return ()
+
+    def get_args(tp: Type[Any]) -> Tuple[Any, ...]:
+        """Get type arguments with all substitutions performed.
+
+        For unions, basic simplifications used by Union constructor are performed.
+        Examples::
+            get_args(Dict[str, int]) == (str, int)
+            get_args(int) == ()
+            get_args(Union[int, Union[T, int], str][int]) == (int, str)
+            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+            get_args(Callable[[], T][int]) == ([], int)
+        """
+        if type(tp).__name__ in AnnotatedTypeNames:
+            return tp.__args__ + tp.__metadata__
+        # the fallback is needed for the same reasons as `get_origin` (see above)
+        return _typing_get_args(tp) or getattr(tp, '__args__', ()) or _generic_get_args(tp)
+
+
+if sys.version_info < (3, 9):
+
+    def convert_generics(tp: Type[Any]) -> Type[Any]:
+        """Python 3.9 and older only supports generics from `typing` module.
+        They convert strings to ForwardRef automatically.
+
+        Examples::
+            typing.List['Hero'] == typing.List[ForwardRef('Hero')]
+        """
+        return tp
+
+else:
+    from typing import _UnionGenericAlias  # type: ignore
+
+    from typing_extensions import _AnnotatedAlias
+
+    def convert_generics(tp: Type[Any]) -> Type[Any]:
+        """
+        Recursively searches for `str` type hints and replaces them with ForwardRef.
+
+        Examples::
+            convert_generics(list['Hero']) == list[ForwardRef('Hero')]
+            convert_generics(dict['Hero', 'Team']) == dict[ForwardRef('Hero'), ForwardRef('Team')]
+            convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')]
+            convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int
+        """
+        origin = get_origin(tp)
+        if not origin or not hasattr(tp, '__args__'):
+            return tp
+
+        args = get_args(tp)
+
+        # typing.Annotated needs special treatment
+        if origin is Annotated:
+            return _AnnotatedAlias(convert_generics(args[0]), args[1:])
+
+        # recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)`
+        converted = tuple(
+            ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg)
+            for arg in args
+        )
+
+        if converted == args:
+            return tp
+        elif isinstance(tp, TypingGenericAlias):
+            return TypingGenericAlias(origin, converted)
+        elif isinstance(tp, TypesUnionType):
+            # recreate types.UnionType (PEP604, Python >= 3.10)
+            return _UnionGenericAlias(origin, converted)
+        else:
+            try:
+                setattr(tp, '__args__', converted)
+            except AttributeError:
+                pass
+            return tp
+
+
+if sys.version_info < (3, 10):
+
+    def is_union(tp: Optional[Type[Any]]) -> bool:
+        return tp is Union
+
+    WithArgsTypes = (TypingGenericAlias,)
+
+else:
+    import types
+    import typing
+
+    def is_union(tp: Optional[Type[Any]]) -> bool:
+        return tp is Union or tp is types.UnionType  # noqa: E721
+
+    WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType)
+
+
+StrPath = Union[str, PathLike]
+
+
+if TYPE_CHECKING:
+    from pydantic.v1.fields import ModelField
+
+    TupleGenerator = Generator[Tuple[str, Any], None, None]
+    DictStrAny = Dict[str, Any]
+    DictAny = Dict[Any, Any]
+    SetStr = Set[str]
+    ListStr = List[str]
+    IntStr = Union[int, str]
+    AbstractSetIntStr = AbstractSet[IntStr]
+    DictIntStrAny = Dict[IntStr, Any]
+    MappingIntStrAny = Mapping[IntStr, Any]
+    CallableGenerator = Generator[AnyCallable, None, None]
+    ReprArgs = Sequence[Tuple[Optional[str], Any]]
+
+    MYPY = False
+    if MYPY:
+        AnyClassMethod = classmethod[Any]
+    else:
+        # classmethod[TargetType, CallableParamSpecType, CallableReturnType]
+        AnyClassMethod = classmethod[Any, Any, Any]
+
+__all__ = (
+    'AnyCallable',
+    'NoArgAnyCallable',
+    'NoneType',
+    'is_none_type',
+    'display_as_type',
+    'resolve_annotations',
+    'is_callable_type',
+    'is_literal_type',
+    'all_literal_values',
+    'is_namedtuple',
+    'is_typeddict',
+    'is_typeddict_special',
+    'is_new_type',
+    'new_type_supertype',
+    'is_classvar',
+    'is_finalvar',
+    'update_field_forward_refs',
+    'update_model_forward_refs',
+    'TupleGenerator',
+    'DictStrAny',
+    'DictAny',
+    'SetStr',
+    'ListStr',
+    'IntStr',
+    'AbstractSetIntStr',
+    'DictIntStrAny',
+    'CallableGenerator',
+    'ReprArgs',
+    'AnyClassMethod',
+    'CallableGenerator',
+    'WithArgsTypes',
+    'get_args',
+    'get_origin',
+    'get_sub_types',
+    'typing_base',
+    'get_all_type_hints',
+    'is_union',
+    'StrPath',
+    'MappingIntStrAny',
+)
+
+
+NoneType = None.__class__
+
+
+NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None])
+
+
+if sys.version_info < (3, 8):
+    # Even though this implementation is slower, we need it for python 3.7:
+    # In python 3.7 "Literal" is not a builtin type and uses a different
+    # mechanism.
+    # for this reason `Literal[None] is Literal[None]` evaluates to `False`,
+    # breaking the faster implementation used for the other python versions.
+
+    def is_none_type(type_: Any) -> bool:
+        return type_ in NONE_TYPES
+
+elif sys.version_info[:2] == (3, 8):
+
+    def is_none_type(type_: Any) -> bool:
+        for none_type in NONE_TYPES:
+            if type_ is none_type:
+                return True
+        # With python 3.8, specifically 3.8.10, Literal "is" checks are very flaky and
+        # can change with very subtle changes, like the use of types in other modules;
+        # hopefully this check avoids that issue.
+        if is_literal_type(type_):  # pragma: no cover
+            return all_literal_values(type_) == (None,)
+        return False
+
+else:
+
+    def is_none_type(type_: Any) -> bool:
+        return type_ in NONE_TYPES
+
+
+def display_as_type(v: Type[Any]) -> str:
+    if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type):
+        v = v.__class__
+
+    if is_union(get_origin(v)):
+        return f'Union[{", ".join(map(display_as_type, get_args(v)))}]'
+
+    if isinstance(v, WithArgsTypes):
+        # Generic alias are constructs like `list[int]`
+        return str(v).replace('typing.', '')
+
+    try:
+        return v.__name__
+    except AttributeError:
+        # happens with typing objects
+        return str(v).replace('typing.', '')
+
+
+def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]:
+    """
+    Partially taken from typing.get_type_hints.
+
+    Resolve string or ForwardRef annotations into type objects if possible.
+    """
+    base_globals: Optional[Dict[str, Any]] = None
+    if module_name:
+        try:
+            module = sys.modules[module_name]
+        except KeyError:
+            # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
+            pass
+        else:
+            base_globals = module.__dict__
+
+    annotations = {}
+    for name, value in raw_annotations.items():
+        if isinstance(value, str):
+            if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1):
+                value = ForwardRef(value, is_argument=False, is_class=True)
+            else:
+                value = ForwardRef(value, is_argument=False)
+        try:
+            if sys.version_info >= (3, 13):
+                value = _eval_type(value, base_globals, None, type_params=())
+            else:
+                value = _eval_type(value, base_globals, None)
+        except NameError:
+            # this is ok, it can be fixed with update_forward_refs
+            pass
+        annotations[name] = value
+    return annotations
+
+
+def is_callable_type(type_: Type[Any]) -> bool:
+    return type_ is Callable or get_origin(type_) is Callable
+
+
+def is_literal_type(type_: Type[Any]) -> bool:
+    return Literal is not None and get_origin(type_) in LITERAL_TYPES
+
+
+def literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
+    return get_args(type_)
+
+
+def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
+    """
+    This method is used to retrieve all Literal values as
+    Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
+    e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`
+    """
+    if not is_literal_type(type_):
+        return (type_,)
+
+    values = literal_values(type_)
+    return tuple(x for value in values for x in all_literal_values(value))
+
+
+def is_namedtuple(type_: Type[Any]) -> bool:
+    """
+    Check if a given class is a named tuple.
+    It can be either a `typing.NamedTuple` or `collections.namedtuple`
+    """
+    from pydantic.v1.utils import lenient_issubclass
+
+    return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields')
+
+
+def is_typeddict(type_: Type[Any]) -> bool:
+    """
+    Check if a given class is a typed dict (from `typing` or `typing_extensions`)
+    In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict)
+    """
+    from pydantic.v1.utils import lenient_issubclass
+
+    return lenient_issubclass(type_, dict) and hasattr(type_, '__total__')
+
+
+def _check_typeddict_special(type_: Any) -> bool:
+    return type_ is TypedDictRequired or type_ is TypedDictNotRequired
+
+
+def is_typeddict_special(type_: Any) -> bool:
+    """
+    Check if type is a TypedDict special form (Required or NotRequired).
+    """
+    return _check_typeddict_special(type_) or _check_typeddict_special(get_origin(type_))
+
+
+test_type = NewType('test_type', str)
+
+
+def is_new_type(type_: Type[Any]) -> bool:
+    """
+    Check whether type_ was created using typing.NewType
+    """
+    return isinstance(type_, test_type.__class__) and hasattr(type_, '__supertype__')  # type: ignore
+
+
+def new_type_supertype(type_: Type[Any]) -> Type[Any]:
+    while hasattr(type_, '__supertype__'):
+        type_ = type_.__supertype__
+    return type_
+
+
+def _check_classvar(v: Optional[Type[Any]]) -> bool:
+    if v is None:
+        return False
+
+    return v.__class__ == ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar'
+
+
+def _check_finalvar(v: Optional[Type[Any]]) -> bool:
+    """
+    Check if a given type is a `typing.Final` type.
+    """
+    if v is None:
+        return False
+
+    return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final')
+
+
+def is_classvar(ann_type: Type[Any]) -> bool:
+    if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)):
+        return True
+
+    # this is an ugly workaround for class vars that contain forward references and are therefore themselves
+    # forward references, see #3679
+    if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['):
+        return True
+
+    return False
+
+
+def is_finalvar(ann_type: Type[Any]) -> bool:
+    return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type))
+
+
+def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None:
+    """
+    Try to update ForwardRefs on fields based on this ModelField, globalns and localns.
+    """
+    prepare = False
+    if field.type_.__class__ == ForwardRef:
+        prepare = True
+        field.type_ = evaluate_forwardref(field.type_, globalns, localns or None)
+    if field.outer_type_.__class__ == ForwardRef:
+        prepare = True
+        field.outer_type_ = evaluate_forwardref(field.outer_type_, globalns, localns or None)
+    if prepare:
+        field.prepare()
+
+    if field.sub_fields:
+        for sub_f in field.sub_fields:
+            update_field_forward_refs(sub_f, globalns=globalns, localns=localns)
+
+    if field.discriminator_key is not None:
+        field.prepare_discriminated_union_sub_fields()
+
+
+def update_model_forward_refs(
+    model: Type[Any],
+    fields: Iterable['ModelField'],
+    json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable],
+    localns: 'DictStrAny',
+    exc_to_suppress: Tuple[Type[BaseException], ...] = (),
+) -> None:
+    """
+    Try to update model fields ForwardRefs based on model and localns.
+    """
+    if model.__module__ in sys.modules:
+        globalns = sys.modules[model.__module__].__dict__.copy()
+    else:
+        globalns = {}
+
+    globalns.setdefault(model.__name__, model)
+
+    for f in fields:
+        try:
+            update_field_forward_refs(f, globalns=globalns, localns=localns)
+        except exc_to_suppress:
+            pass
+
+    for key in set(json_encoders.keys()):
+        if isinstance(key, str):
+            fr: ForwardRef = ForwardRef(key)
+        elif isinstance(key, ForwardRef):
+            fr = key
+        else:
+            continue
+
+        try:
+            new_key = evaluate_forwardref(fr, globalns, localns or None)
+        except exc_to_suppress:  # pragma: no cover
+            continue
+
+        json_encoders[new_key] = json_encoders.pop(key)
+
+
+def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]:
+    """
+    Tries to get the class of a Type[T] annotation. Returns True if Type is used
+    without brackets. Otherwise returns None.
+    """
+    if type_ is type:
+        return True
+
+    if get_origin(type_) is None:
+        return None
+
+    args = get_args(type_)
+    if not args or not isinstance(args[0], type):
+        return True
+    else:
+        return args[0]
+
+
+def get_sub_types(tp: Any) -> List[Any]:
+    """
+    Return all the types that are allowed by type `tp`
+    `tp` can be a `Union` of allowed types or an `Annotated` type
+    """
+    origin = get_origin(tp)
+    if origin is Annotated:
+        return get_sub_types(get_args(tp)[0])
+    elif is_union(origin):
+        return [x for t in get_args(tp) for x in get_sub_types(t)]
+    else:
+        return [tp]
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/utils.py b/.venv/lib/python3.12/site-packages/pydantic/v1/utils.py
new file mode 100644
index 00000000..0bd238ee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/utils.py
@@ -0,0 +1,804 @@
+import keyword
+import warnings
+import weakref
+from collections import OrderedDict, defaultdict, deque
+from copy import deepcopy
+from itertools import islice, zip_longest
+from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType
+from typing import (
+    TYPE_CHECKING,
+    AbstractSet,
+    Any,
+    Callable,
+    Collection,
+    Dict,
+    Generator,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    NoReturn,
+    Optional,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+
+from typing_extensions import Annotated
+
+from pydantic.v1.errors import ConfigError
+from pydantic.v1.typing import (
+    NoneType,
+    WithArgsTypes,
+    all_literal_values,
+    display_as_type,
+    get_args,
+    get_origin,
+    is_literal_type,
+    is_union,
+)
+from pydantic.v1.version import version_info
+
+if TYPE_CHECKING:
+    from inspect import Signature
+    from pathlib import Path
+
+    from pydantic.v1.config import BaseConfig
+    from pydantic.v1.dataclasses import Dataclass
+    from pydantic.v1.fields import ModelField
+    from pydantic.v1.main import BaseModel
+    from pydantic.v1.typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs
+
+    RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]]
+
+__all__ = (
+    'import_string',
+    'sequence_like',
+    'validate_field_name',
+    'lenient_isinstance',
+    'lenient_issubclass',
+    'in_ipython',
+    'is_valid_identifier',
+    'deep_update',
+    'update_not_none',
+    'almost_equal_floats',
+    'get_model',
+    'to_camel',
+    'to_lower_camel',
+    'is_valid_field',
+    'smart_deepcopy',
+    'PyObjectStr',
+    'Representation',
+    'GetterDict',
+    'ValueItems',
+    'version_info',  # required here to match behaviour in v1.3
+    'ClassAttribute',
+    'path_type',
+    'ROOT_KEY',
+    'get_unique_discriminator_alias',
+    'get_discriminator_alias_and_values',
+    'DUNDER_ATTRIBUTES',
+)
+
+ROOT_KEY = '__root__'
+# these are types that are returned unchanged by deepcopy
+IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = {
+    int,
+    float,
+    complex,
+    str,
+    bool,
+    bytes,
+    type,
+    NoneType,
+    FunctionType,
+    BuiltinFunctionType,
+    LambdaType,
+    weakref.ref,
+    CodeType,
+    # note: including ModuleType here differs from the behaviour of deepcopy, which raises an error for modules.
+    # That might not be a good idea in general, but since this set is only used internally
+    # against default values of fields, it allows a field to actually have a module as its default value
+    ModuleType,
+    NotImplemented.__class__,
+    Ellipsis.__class__,
+}
+
+# these are types that if empty, might be copied with simple copy() instead of deepcopy()
+BUILTIN_COLLECTIONS: Set[Type[Any]] = {
+    list,
+    set,
+    tuple,
+    frozenset,
+    dict,
+    OrderedDict,
+    defaultdict,
+    deque,
+}
+
+
+def import_string(dotted_path: str) -> Any:
+    """
+    Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the
+    last name in the path. Raise ImportError if the import fails.
+    """
+    from importlib import import_module
+
+    try:
+        module_path, class_name = dotted_path.strip(' ').rsplit('.', 1)
+    except ValueError as e:
+        raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e
+
+    module = import_module(module_path)
+    try:
+        return getattr(module, class_name)
+    except AttributeError as e:
+        raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e
+
+
+def truncate(v: Union[str], *, max_len: int = 80) -> str:
+    """
+    Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long
+    """
+    warnings.warn('`truncate` is no longer used by pydantic and is deprecated', DeprecationWarning)
+    if isinstance(v, str) and len(v) > (max_len - 2):
+        # -3 so quote + string + … + quote has correct length
+        return (v[: (max_len - 3)] + '…').__repr__()
+    try:
+        v = v.__repr__()
+    except TypeError:
+        v = v.__class__.__repr__(v)  # in case v is a type
+    if len(v) > max_len:
+        v = v[: max_len - 1] + '…'
+    return v
+
+
+def sequence_like(v: Any) -> bool:
+    return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))
+
+
+def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None:
+    """
+    Ensure that the field's name does not shadow an existing attribute of the model.
+    """
+    for base in bases:
+        if getattr(base, field_name, None):
+            raise NameError(
+                f'Field name "{field_name}" shadows a BaseModel attribute; '
+                f'use a different field name with "alias=\'{field_name}\'".'
+            )
+
+
+def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool:
+    try:
+        return isinstance(o, class_or_tuple)  # type: ignore[arg-type]
+    except TypeError:
+        return False
+
+
+def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool:
+    try:
+        return isinstance(cls, type) and issubclass(cls, class_or_tuple)  # type: ignore[arg-type]
+    except TypeError:
+        if isinstance(cls, WithArgsTypes):
+            return False
+        raise  # pragma: no cover
+
+
+def in_ipython() -> bool:
+    """
+    Check whether we're in an ipython environment, including jupyter notebooks.
+    """
+    try:
+        eval('__IPYTHON__')
+    except NameError:
+        return False
+    else:  # pragma: no cover
+        return True
+
+
+def is_valid_identifier(identifier: str) -> bool:
+    """
+    Checks that a string is a valid identifier and not a Python keyword.
+    :param identifier: The identifier to test.
+    :return: True if the identifier is valid.
+    """
+    return identifier.isidentifier() and not keyword.iskeyword(identifier)
+
+
+KeyType = TypeVar('KeyType')
+
+
+def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]:
+    updated_mapping = mapping.copy()
+    for updating_mapping in updating_mappings:
+        for k, v in updating_mapping.items():
+            if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):
+                updated_mapping[k] = deep_update(updated_mapping[k], v)
+            else:
+                updated_mapping[k] = v
+    return updated_mapping
+
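+# Example (a note, not upstream code):
+#     deep_update({'a': {'x': 1}, 'b': 2}, {'a': {'y': 3}}) == {'a': {'x': 1, 'y': 3}, 'b': 2}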
+
+def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None:
+    mapping.update({k: v for k, v in update.items() if v is not None})
+
+
+def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool:
+    """
+    Return True if two floats are almost equal
+    """
+    return abs(value_1 - value_2) <= delta
+
+
+def generate_model_signature(
+    init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig']
+) -> 'Signature':
+    """
+    Generate signature for model based on its fields
+    """
+    from inspect import Parameter, Signature, signature
+
+    from pydantic.v1.config import Extra
+
+    present_params = signature(init).parameters.values()
+    merged_params: Dict[str, Parameter] = {}
+    var_kw = None
+    use_var_kw = False
+
+    for param in islice(present_params, 1, None):  # skip self arg
+        if param.kind is param.VAR_KEYWORD:
+            var_kw = param
+            continue
+        merged_params[param.name] = param
+
+    if var_kw:  # if custom init has no var_kw, fields which are not declared in it cannot be passed through
+        allow_names = config.allow_population_by_field_name
+        for field_name, field in fields.items():
+            param_name = field.alias
+            if field_name in merged_params or param_name in merged_params:
+                continue
+            elif not is_valid_identifier(param_name):
+                if allow_names and is_valid_identifier(field_name):
+                    param_name = field_name
+                else:
+                    use_var_kw = True
+                    continue
+
+            # TODO: replace annotation with actual expected types once #1055 solved
+            kwargs = {'default': field.default} if not field.required else {}
+            merged_params[param_name] = Parameter(
+                param_name, Parameter.KEYWORD_ONLY, annotation=field.annotation, **kwargs
+            )
+
+    if config.extra is Extra.allow:
+        use_var_kw = True
+
+    if var_kw and use_var_kw:
+        # Make sure the parameter for extra kwargs
+        # does not have the same name as a field
+        default_model_signature = [
+            ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD),
+            ('data', Parameter.VAR_KEYWORD),
+        ]
+        if [(p.name, p.kind) for p in present_params] == default_model_signature:
+            # if this is the standard model signature, use extra_data as the extra args name
+            var_kw_name = 'extra_data'
+        else:
+            # else start from var_kw
+            var_kw_name = var_kw.name
+
+        # generate a name that's definitely unique
+        while var_kw_name in fields:
+            var_kw_name += '_'
+        merged_params[var_kw_name] = var_kw.replace(name=var_kw_name)
+
+    return Signature(parameters=list(merged_params.values()), return_annotation=None)
+
+
+def get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']:
+    from pydantic.v1.main import BaseModel
+
+    try:
+        model_cls = obj.__pydantic_model__  # type: ignore
+    except AttributeError:
+        model_cls = obj
+
+    if not issubclass(model_cls, BaseModel):
+        raise TypeError('Unsupported type, must be either BaseModel or dataclass')
+    return model_cls
+
+
+def to_camel(string: str) -> str:
+    return ''.join(word.capitalize() for word in string.split('_'))
+
+
+def to_lower_camel(string: str) -> str:
+    if len(string) >= 1:
+        pascal_string = to_camel(string)
+        return pascal_string[0].lower() + pascal_string[1:]
+    return string.lower()
+
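+# Examples (a note, not upstream code):
+#     to_camel('http_client_error') == 'HttpClientError'
+#     to_lower_camel('http_client_error') == 'httpClientError'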
+
+T = TypeVar('T')
+
+
+def unique_list(
+    input_list: Union[List[T], Tuple[T, ...]],
+    *,
+    name_factory: Callable[[T], str] = str,
+) -> List[T]:
+    """
+    Make a list unique while maintaining order.
+    We replace an existing entry when a later item has the same name
+    (e.g. root validator overridden in subclass)
+    """
+    result: List[T] = []
+    result_names: List[str] = []
+    for v in input_list:
+        v_name = name_factory(v)
+        if v_name not in result_names:
+            result_names.append(v_name)
+            result.append(v)
+        else:
+            result[result_names.index(v_name)] = v
+
+    return result
+
+
+class PyObjectStr(str):
+    """
+    String class where repr doesn't include quotes. Useful with Representation when you want to return a string
+    representation of something that is valid (or pseudo-valid) Python.
+    """
+
+    def __repr__(self) -> str:
+        return str(self)
+
+
+class Representation:
+    """
+    Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details.
+
+    __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations
+    of objects.
+    """
+
+    __slots__: Tuple[str, ...] = tuple()
+
+    def __repr_args__(self) -> 'ReprArgs':
+        """
+        Returns the attributes to show in __str__, __repr__, and __pretty__; this is generally overridden.
+
+        Can either return:
+        * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`
+        * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`
+        """
+        attrs = ((s, getattr(self, s)) for s in self.__slots__)
+        return [(a, v) for a, v in attrs if v is not None]
+
+    def __repr_name__(self) -> str:
+        """
+        Name of the instance's class, used in __repr__.
+        """
+        return self.__class__.__name__
+
+    def __repr_str__(self, join_str: str) -> str:
+        return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())
+
+    def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]:
+        """
+        Used by devtools (https://python-devtools.helpmanual.io/) to provide human readable representations of objects
+        """
+        yield self.__repr_name__() + '('
+        yield 1
+        for name, value in self.__repr_args__():
+            if name is not None:
+                yield name + '='
+            yield fmt(value)
+            yield ','
+            yield 0
+        yield -1
+        yield ')'
+
+    def __str__(self) -> str:
+        return self.__repr_str__(' ')
+
+    def __repr__(self) -> str:
+        return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
+
+    def __rich_repr__(self) -> 'RichReprResult':
+        """Get fields for Rich library"""
+        for name, field_repr in self.__repr_args__():
+            if name is None:
+                yield field_repr
+            else:
+                yield name, field_repr
+
+
+class GetterDict(Representation):
+    """
+    Hack to make objects smell just enough like dicts for validate_model.
+
+    We can't inherit from Mapping[str, Any] because it upsets cython, so we have to implement all methods ourselves.
+    """
+
+    __slots__ = ('_obj',)
+
+    def __init__(self, obj: Any):
+        self._obj = obj
+
+    def __getitem__(self, key: str) -> Any:
+        try:
+            return getattr(self._obj, key)
+        except AttributeError as e:
+            raise KeyError(key) from e
+
+    def get(self, key: Any, default: Any = None) -> Any:
+        return getattr(self._obj, key, default)
+
+    def extra_keys(self) -> Set[Any]:
+        """
+        We don't want to get any other attributes of obj if the model didn't explicitly ask for them
+        """
+        return set()
+
+    def keys(self) -> List[Any]:
+        """
+        Keys of the pseudo dictionary; uses a list rather than a set so that order information is maintained, as in
+        python dictionaries.
+        """
+        return list(self)
+
+    def values(self) -> List[Any]:
+        return [self[k] for k in self]
+
+    def items(self) -> Iterator[Tuple[str, Any]]:
+        for k in self:
+            yield k, self.get(k)
+
+    def __iter__(self) -> Iterator[str]:
+        for name in dir(self._obj):
+            if not name.startswith('_'):
+                yield name
+
+    def __len__(self) -> int:
+        return sum(1 for _ in self)
+
+    def __contains__(self, item: Any) -> bool:
+        return item in self.keys()
+
+    def __eq__(self, other: Any) -> bool:
+        return dict(self) == dict(other.items())
+
+    def __repr_args__(self) -> 'ReprArgs':
+        return [(None, dict(self))]
+
+    def __repr_name__(self) -> str:
+        return f'GetterDict[{display_as_type(self._obj)}]'
+
+
+class ValueItems(Representation):
+    """
+    Class for more convenient calculation of excluded or included fields on values.
+    """
+
+    __slots__ = ('_items', '_type')
+
+    def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None:
+        items = self._coerce_items(items)
+
+        if isinstance(value, (list, tuple)):
+            items = self._normalize_indexes(items, len(value))
+
+        self._items: 'MappingIntStrAny' = items
+
+    def is_excluded(self, item: Any) -> bool:
+        """
+        Check if item is fully excluded.
+
+        :param item: key or index of a value
+        """
+        return self.is_true(self._items.get(item))
+
+    def is_included(self, item: Any) -> bool:
+        """
+        Check if value is contained in self._items
+
+        :param item: key or index of value
+        """
+        return item in self._items
+
+    def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]:
+        """
+        :param e: key or index of element on value
+        :return: raw values for the element if self._items is a dict and contains the needed element
+        """
+
+        item = self._items.get(e)
+        return item if not self.is_true(item) else None
+
+    def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int) -> 'DictIntStrAny':
+        """
+        :param items: dict or set of indexes which will be normalized
+        :param v_length: length of the sequence whose indexes will be normalized
+
+        >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4)
+        {0: True, 2: True, 3: True}
+        >>> self._normalize_indexes({'__all__': True}, 4)
+        {0: True, 1: True, 2: True, 3: True}
+        """
+
+        normalized_items: 'DictIntStrAny' = {}
+        all_items = None
+        for i, v in items.items():
+            if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)):
+                raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}')
+            if i == '__all__':
+                all_items = self._coerce_value(v)
+                continue
+            if not isinstance(i, int):
+                raise TypeError(
+                    'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '
+                    'expected integer keys or keyword "__all__"'
+                )
+            normalized_i = v_length + i if i < 0 else i
+            normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i))
+
+        if not all_items:
+            return normalized_items
+        if self.is_true(all_items):
+            for i in range(v_length):
+                normalized_items.setdefault(i, ...)
+            return normalized_items
+        for i in range(v_length):
+            normalized_item = normalized_items.setdefault(i, {})
+            if not self.is_true(normalized_item):
+                normalized_items[i] = self.merge(all_items, normalized_item)
+        return normalized_items
+
+    @classmethod
+    def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any:
+        """
+        Merge a ``base`` item with an ``override`` item.
+
+        Both ``base`` and ``override`` are converted to dictionaries if possible.
+        Sets are converted to dictionaries with the set's entries as keys and
+        Ellipsis as values.
+
+        Each key-value pair existing in ``base`` is merged with ``override``,
+        while the rest of the key-value pairs are updated recursively with this function.
+
+        Merging takes place based on the "union" of keys if ``intersect`` is
+        set to ``False`` (default) and on the intersection of keys if
+        ``intersect`` is set to ``True``.
+        """
+        override = cls._coerce_value(override)
+        base = cls._coerce_value(base)
+        if override is None:
+            return base
+        if cls.is_true(base) or base is None:
+            return override
+        if cls.is_true(override):
+            return base if intersect else override
+
+        # intersection or union of keys while preserving ordering:
+        if intersect:
+            merge_keys = [k for k in base if k in override] + [k for k in override if k in base]
+        else:
+            merge_keys = list(base) + [k for k in override if k not in base]
+
+        merged: 'DictIntStrAny' = {}
+        for k in merge_keys:
+            merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect)
+            if merged_item is not None:
+                merged[k] = merged_item
+
+        return merged
+
+    @staticmethod
+    def _coerce_items(items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> 'MappingIntStrAny':
+        if isinstance(items, Mapping):
+            pass
+        elif isinstance(items, AbstractSet):
+            items = dict.fromkeys(items, ...)
+        else:
+            class_name = getattr(items, '__class__', '???')
+            assert_never(
+                items,
+                f'Unexpected type of exclude value {class_name}',
+            )
+        return items
+
+    @classmethod
+    def _coerce_value(cls, value: Any) -> Any:
+        if value is None or cls.is_true(value):
+            return value
+        return cls._coerce_items(value)
+
+    @staticmethod
+    def is_true(v: Any) -> bool:
+        return v is True or v is ...
+
+    def __repr_args__(self) -> 'ReprArgs':
+        return [(None, self._items)]
+
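An illustrative sketch of the merge semantics described in the docstring above; the keys are made up and ``...`` (Ellipsis) means "include/exclude fully":

    from pydantic.v1.utils import ValueItems

    # union merge (default): keys from both sides, nested mappings merged recursively
    assert ValueItems.merge({'a': ..., 'b': {'x': ...}}, {'b': {'y': ...}, 'c': ...}) == {
        'a': ...,
        'b': {'x': ..., 'y': ...},
        'c': ...,
    }

    # intersection merge: only keys present on both sides survive
    assert ValueItems.merge({'a': ..., 'b': ...}, {'b': ..., 'c': ...}, intersect=True) == {'b': ...}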
+
+class ClassAttribute:
+    """
+    Hide class attribute from its instances
+    """
+
+    __slots__ = (
+        'name',
+        'value',
+    )
+
+    def __init__(self, name: str, value: Any) -> None:
+        self.name = name
+        self.value = value
+
+    def __get__(self, instance: Any, owner: Type[Any]) -> None:
+        if instance is None:
+            return self.value
+        raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')
+
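A small sketch of what ClassAttribute does; the ``Example`` class here is hypothetical:

    from pydantic.v1.utils import ClassAttribute

    class Example:
        marker = ClassAttribute('marker', 'class-only value')

    assert Example.marker == 'class-only value'   # class access returns the stored value
    try:
        Example().marker                          # instance access is blocked
    except AttributeError as exc:
        assert 'class-only' in str(exc)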
+
+path_types = {
+    'is_dir': 'directory',
+    'is_file': 'file',
+    'is_mount': 'mount point',
+    'is_symlink': 'symlink',
+    'is_block_device': 'block device',
+    'is_char_device': 'char device',
+    'is_fifo': 'FIFO',
+    'is_socket': 'socket',
+}
+
+
+def path_type(p: 'Path') -> str:
+    """
+    Find out what sort of thing a path is.
+    """
+    assert p.exists(), 'path does not exist'
+    for method, name in path_types.items():
+        if getattr(p, method)():
+            return name
+
+    return 'unknown'
+
+
+Obj = TypeVar('Obj')
+
+
+def smart_deepcopy(obj: Obj) -> Obj:
+    """
+    Return obj as-is for immutable built-in types,
+    use obj.copy() for empty built-in collections,
+    and copy.deepcopy() for non-empty collections and unknown objects.
+    """
+
+    obj_type = obj.__class__
+    if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:
+        return obj  # fastest case: obj is immutable and not collection therefore will not be copied anyway
+    try:
+        if not obj and obj_type in BUILTIN_COLLECTIONS:
+            # faster way for empty collections, no need to copy its members
+            return obj if obj_type is tuple else obj.copy()  # type: ignore  # tuple doesn't have copy method
+    except (TypeError, ValueError, RuntimeError):
+        # do we really dare to catch ALL errors? Seems a bit risky
+        pass
+
+    return deepcopy(obj)  # slowest way when we actually might need a deepcopy
+
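An illustrative sketch of the three paths taken by smart_deepcopy (assuming CPython's built-in types):

    from pydantic.v1.utils import smart_deepcopy

    n = 10 ** 6
    assert smart_deepcopy(n) is n                 # immutable non-collection: returned untouched

    empty: list = []
    assert smart_deepcopy(empty) is not empty     # empty built-in collection: cheap .copy()

    nested = [[1, 2]]
    copied = smart_deepcopy(nested)
    assert copied == nested and copied[0] is not nested[0]   # non-empty: full deepcopy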
+
+def is_valid_field(name: str) -> bool:
+    if not name.startswith('_'):
+        return True
+    return ROOT_KEY == name
+
+
+DUNDER_ATTRIBUTES = {
+    '__annotations__',
+    '__classcell__',
+    '__doc__',
+    '__module__',
+    '__orig_bases__',
+    '__orig_class__',
+    '__qualname__',
+}
+
+
+def is_valid_private_name(name: str) -> bool:
+    return not is_valid_field(name) and name not in DUNDER_ATTRIBUTES
+
+
+_EMPTY = object()
+
+
+def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool:
+    """
+    Check that the items of `left` are the same objects as those in `right`.
+
+    >>> a, b = object(), object()
+    >>> all_identical([a, b, a], [a, b, a])
+    True
+    >>> all_identical([a, b, [a]], [a, b, [a]])  # new list objects, which are "equal" but not "identical"
+    False
+    """
+    for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY):
+        if left_item is not right_item:
+            return False
+    return True
+
+
+def assert_never(obj: NoReturn, msg: str) -> NoReturn:
+    """
+    Helper to make sure that we have covered all possible types.
+
+    This is mostly useful for ``mypy``, docs:
+    https://mypy.readthedocs.io/en/latest/literal_types.html#exhaustive-checks
+    """
+    raise TypeError(msg)
+
+
+def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str:
+    """Validate that all aliases are the same and if that's the case return the alias"""
+    unique_aliases = set(all_aliases)
+    if len(unique_aliases) > 1:
+        raise ConfigError(
+            f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})'
+        )
+    return unique_aliases.pop()
+
+
+def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]:
+    """
+    Get alias and all valid values in the `Literal` type of the discriminator field
+    `tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many.
+    """
+    is_root_model = getattr(tp, '__custom_root_type__', False)
+
+    if get_origin(tp) is Annotated:
+        tp = get_args(tp)[0]
+
+    if hasattr(tp, '__pydantic_model__'):
+        tp = tp.__pydantic_model__
+
+    if is_union(get_origin(tp)):
+        alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key)
+        return alias, tuple(v for values in all_values for v in values)
+    elif is_root_model:
+        union_type = tp.__fields__[ROOT_KEY].type_
+        alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key)
+
+        if len(set(all_values)) > 1:
+            raise ConfigError(
+                f'Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}'
+            )
+
+        return alias, all_values[0]
+
+    else:
+        try:
+            t_discriminator_type = tp.__fields__[discriminator_key].type_
+        except AttributeError as e:
+            raise TypeError(f'Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`') from e
+        except KeyError as e:
+            raise ConfigError(f'Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}') from e
+
+        if not is_literal_type(t_discriminator_type):
+            raise ConfigError(f'Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`')
+
+        return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type)
+
+
+def _get_union_alias_and_all_values(
+    union_type: Type[Any], discriminator_key: str
+) -> Tuple[str, Tuple[Tuple[str, ...], ...]]:
+    zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)]
+    # unzip: [('alias_a', ('v1', 'v2')), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))]
+    all_aliases, all_values = zip(*zipped_aliases_values)
+    return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values
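
A hedged sketch of how this helper resolves the discriminator on a simple Literal-tagged union; the ``Cat``/``Dog`` models are made up for illustration:

    from typing import Literal, Union

    from pydantic.v1 import BaseModel
    from pydantic.v1.utils import get_discriminator_alias_and_values

    class Cat(BaseModel):
        pet_type: Literal['cat']

    class Dog(BaseModel):
        pet_type: Literal['dog', 'puppy']

    alias, values = get_discriminator_alias_and_values(Union[Cat, Dog], 'pet_type')
    assert alias == 'pet_type'
    assert values == ('cat', 'dog', 'puppy')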
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/validators.py b/.venv/lib/python3.12/site-packages/pydantic/v1/validators.py
new file mode 100644
index 00000000..c0940e81
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/validators.py
@@ -0,0 +1,768 @@
+import math
+import re
+from collections import OrderedDict, deque
+from collections.abc import Hashable as CollectionsHashable
+from datetime import date, datetime, time, timedelta
+from decimal import Decimal, DecimalException
+from enum import Enum, IntEnum
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from pathlib import Path
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Deque,
+    Dict,
+    ForwardRef,
+    FrozenSet,
+    Generator,
+    Hashable,
+    List,
+    NamedTuple,
+    Pattern,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+from uuid import UUID
+from warnings import warn
+
+from pydantic.v1 import errors
+from pydantic.v1.datetime_parse import parse_date, parse_datetime, parse_duration, parse_time
+from pydantic.v1.typing import (
+    AnyCallable,
+    all_literal_values,
+    display_as_type,
+    get_class,
+    is_callable_type,
+    is_literal_type,
+    is_namedtuple,
+    is_none_type,
+    is_typeddict,
+)
+from pydantic.v1.utils import almost_equal_floats, lenient_issubclass, sequence_like
+
+if TYPE_CHECKING:
+    from typing_extensions import Literal, TypedDict
+
+    from pydantic.v1.config import BaseConfig
+    from pydantic.v1.fields import ModelField
+    from pydantic.v1.types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt
+
+    ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt]
+    AnyOrderedDict = OrderedDict[Any, Any]
+    Number = Union[int, float, Decimal]
+    StrBytes = Union[str, bytes]
+
+
+def str_validator(v: Any) -> Union[str]:
+    if isinstance(v, str):
+        if isinstance(v, Enum):
+            return v.value
+        else:
+            return v
+    elif isinstance(v, (float, int, Decimal)):
+        # is there anything else we want to add here? If you think so, create an issue.
+        return str(v)
+    elif isinstance(v, (bytes, bytearray)):
+        return v.decode()
+    else:
+        raise errors.StrError()
+
+
+def strict_str_validator(v: Any) -> Union[str]:
+    if isinstance(v, str) and not isinstance(v, Enum):
+        return v
+    raise errors.StrError()
+
+
+def bytes_validator(v: Any) -> Union[bytes]:
+    if isinstance(v, bytes):
+        return v
+    elif isinstance(v, bytearray):
+        return bytes(v)
+    elif isinstance(v, str):
+        return v.encode()
+    elif isinstance(v, (float, int, Decimal)):
+        return str(v).encode()
+    else:
+        raise errors.BytesError()
+
+
+def strict_bytes_validator(v: Any) -> Union[bytes]:
+    if isinstance(v, bytes):
+        return v
+    elif isinstance(v, bytearray):
+        return bytes(v)
+    else:
+        raise errors.BytesError()
+
+
+BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'}
+BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'}
+
+
+def bool_validator(v: Any) -> bool:
+    if v is True or v is False:
+        return v
+    if isinstance(v, bytes):
+        v = v.decode()
+    if isinstance(v, str):
+        v = v.lower()
+    try:
+        if v in BOOL_TRUE:
+            return True
+        if v in BOOL_FALSE:
+            return False
+    except TypeError:
+        raise errors.BoolError()
+    raise errors.BoolError()
+
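A quick sketch of how bool_validator treats the string and bytes spellings collected in BOOL_TRUE/BOOL_FALSE above; the ``Flag`` model is made up for illustration:

    from pydantic.v1 import BaseModel

    class Flag(BaseModel):
        active: bool

    assert Flag(active='yes').active is True     # 'yes' is in BOOL_TRUE
    assert Flag(active='OFF').active is False    # lower-cased before lookup
    assert Flag(active=b'1').active is True      # bytes are decoded first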
+
+# matches the default limit in CPython, see https://github.com/python/cpython/pull/96500
+max_str_int = 4_300
+
+
+def int_validator(v: Any) -> int:
+    if isinstance(v, int) and not (v is True or v is False):
+        return v
+
+    # see https://github.com/pydantic/pydantic/issues/1477 and in turn, https://github.com/python/cpython/issues/95778
+    # this check should be unnecessary once patch releases are out for 3.7, 3.8, 3.9 and 3.10
+    # but better to check here until then.
+    # NOTICE: this does not fully protect the user from the DoS risk since the standard library JSON implementation
+    # (and other std lib modules like xml) use `int()` and are likely called before this; the best workarounds are to
+    # 1. update to the latest patch release of Python once released, 2. use a different JSON library like ujson
+    if isinstance(v, (str, bytes, bytearray)) and len(v) > max_str_int:
+        raise errors.IntegerError()
+
+    try:
+        return int(v)
+    except (TypeError, ValueError, OverflowError):
+        raise errors.IntegerError()
+
+
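A hedged sketch of the length guard above (the limit mirrors CPython's default of 4300 digits); the model name ``M`` is made up for illustration:

    from pydantic.v1 import BaseModel, ValidationError

    class M(BaseModel):
        n: int

    M(n='1' * 4_300)           # at the limit: parsed normally
    try:
        M(n='1' * 4_301)       # over the limit: rejected before int() is even attempted
    except ValidationError:
        pass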
+def strict_int_validator(v: Any) -> int:
+    if isinstance(v, int) and not (v is True or v is False):
+        return v
+    raise errors.IntegerError()
+
+
+def float_validator(v: Any) -> float:
+    if isinstance(v, float):
+        return v
+
+    try:
+        return float(v)
+    except (TypeError, ValueError):
+        raise errors.FloatError()
+
+
+def strict_float_validator(v: Any) -> float:
+    if isinstance(v, float):
+        return v
+    raise errors.FloatError()
+
+
+def float_finite_validator(v: 'Number', field: 'ModelField', config: 'BaseConfig') -> 'Number':
+    allow_inf_nan = getattr(field.type_, 'allow_inf_nan', None)
+    if allow_inf_nan is None:
+        allow_inf_nan = config.allow_inf_nan
+
+    if allow_inf_nan is False and (math.isnan(v) or math.isinf(v)):
+        raise errors.NumberNotFiniteError()
+    return v
+
+
+def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number':
+    field_type: ConstrainedNumber = field.type_
+    if field_type.multiple_of is not None:
+        mod = float(v) / float(field_type.multiple_of) % 1
+        if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0):
+            raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of)
+    return v
+
+
+def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number':
+    field_type: ConstrainedNumber = field.type_
+    if field_type.gt is not None and not v > field_type.gt:
+        raise errors.NumberNotGtError(limit_value=field_type.gt)
+    elif field_type.ge is not None and not v >= field_type.ge:
+        raise errors.NumberNotGeError(limit_value=field_type.ge)
+
+    if field_type.lt is not None and not v < field_type.lt:
+        raise errors.NumberNotLtError(limit_value=field_type.lt)
+    if field_type.le is not None and not v <= field_type.le:
+        raise errors.NumberNotLeError(limit_value=field_type.le)
+
+    return v
+
+
+def constant_validator(v: 'Any', field: 'ModelField') -> 'Any':
+    """Validate ``const`` fields.
+
+    The value provided for a ``const`` field must be equal to the default value
+    of the field. This is to support the keyword of the same name in JSON
+    Schema.
+    """
+    if v != field.default:
+        raise errors.WrongConstantError(given=v, permitted=[field.default])
+
+    return v
+
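A brief sketch of ``const`` fields in practice; the ``Payload`` model is made up for illustration:

    from pydantic.v1 import BaseModel, Field, ValidationError

    class Payload(BaseModel):
        version: int = Field(1, const=True)

    assert Payload().version == 1            # default is used
    assert Payload(version=1).version == 1   # explicitly passing the default is allowed
    try:
        Payload(version=2)                   # anything else triggers WrongConstantError
    except ValidationError:
        pass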
+
+def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes':
+    v_len = len(v)
+
+    min_length = config.min_anystr_length
+    if v_len < min_length:
+        raise errors.AnyStrMinLengthError(limit_value=min_length)
+
+    max_length = config.max_anystr_length
+    if max_length is not None and v_len > max_length:
+        raise errors.AnyStrMaxLengthError(limit_value=max_length)
+
+    return v
+
+
+def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes':
+    return v.strip()
+
+
+def anystr_upper(v: 'StrBytes') -> 'StrBytes':
+    return v.upper()
+
+
+def anystr_lower(v: 'StrBytes') -> 'StrBytes':
+    return v.lower()
+
+
+def ordered_dict_validator(v: Any) -> 'AnyOrderedDict':
+    if isinstance(v, OrderedDict):
+        return v
+
+    try:
+        return OrderedDict(v)
+    except (TypeError, ValueError):
+        raise errors.DictError()
+
+
+def dict_validator(v: Any) -> Dict[Any, Any]:
+    if isinstance(v, dict):
+        return v
+
+    try:
+        return dict(v)
+    except (TypeError, ValueError):
+        raise errors.DictError()
+
+
+def list_validator(v: Any) -> List[Any]:
+    if isinstance(v, list):
+        return v
+    elif sequence_like(v):
+        return list(v)
+    else:
+        raise errors.ListError()
+
+
+def tuple_validator(v: Any) -> Tuple[Any, ...]:
+    if isinstance(v, tuple):
+        return v
+    elif sequence_like(v):
+        return tuple(v)
+    else:
+        raise errors.TupleError()
+
+
+def set_validator(v: Any) -> Set[Any]:
+    if isinstance(v, set):
+        return v
+    elif sequence_like(v):
+        return set(v)
+    else:
+        raise errors.SetError()
+
+
+def frozenset_validator(v: Any) -> FrozenSet[Any]:
+    if isinstance(v, frozenset):
+        return v
+    elif sequence_like(v):
+        return frozenset(v)
+    else:
+        raise errors.FrozenSetError()
+
+
+def deque_validator(v: Any) -> Deque[Any]:
+    if isinstance(v, deque):
+        return v
+    elif sequence_like(v):
+        return deque(v)
+    else:
+        raise errors.DequeError()
+
+
+def enum_member_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum:
+    try:
+        enum_v = field.type_(v)
+    except ValueError:
+        # field.type_ should be an enum, so will be iterable
+        raise errors.EnumMemberError(enum_values=list(field.type_))
+    return enum_v.value if config.use_enum_values else enum_v
+
+
+def uuid_validator(v: Any, field: 'ModelField') -> UUID:
+    try:
+        if isinstance(v, str):
+            v = UUID(v)
+        elif isinstance(v, (bytes, bytearray)):
+            try:
+                v = UUID(v.decode())
+            except ValueError:
+                # 16 bytes in big-endian order as the bytes argument fails
+                # the string-decoding check above
+                v = UUID(bytes=v)
+    except ValueError:
+        raise errors.UUIDError()
+
+    if not isinstance(v, UUID):
+        raise errors.UUIDError()
+
+    required_version = getattr(field.type_, '_required_version', None)
+    if required_version and v.version != required_version:
+        raise errors.UUIDVersionError(required_version=required_version)
+
+    return v
+
+
+def decimal_validator(v: Any) -> Decimal:
+    if isinstance(v, Decimal):
+        return v
+    elif isinstance(v, (bytes, bytearray)):
+        v = v.decode()
+
+    v = str(v).strip()
+
+    try:
+        v = Decimal(v)
+    except DecimalException:
+        raise errors.DecimalError()
+
+    if not v.is_finite():
+        raise errors.DecimalIsNotFiniteError()
+
+    return v
+
+
+def hashable_validator(v: Any) -> Hashable:
+    if isinstance(v, Hashable):
+        return v
+
+    raise errors.HashableError()
+
+
+def ip_v4_address_validator(v: Any) -> IPv4Address:
+    if isinstance(v, IPv4Address):
+        return v
+
+    try:
+        return IPv4Address(v)
+    except ValueError:
+        raise errors.IPv4AddressError()
+
+
+def ip_v6_address_validator(v: Any) -> IPv6Address:
+    if isinstance(v, IPv6Address):
+        return v
+
+    try:
+        return IPv6Address(v)
+    except ValueError:
+        raise errors.IPv6AddressError()
+
+
+def ip_v4_network_validator(v: Any) -> IPv4Network:
+    """
+    Assume IPv4Network is initialised with the default ``strict`` argument
+
+    See more:
+    https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network
+    """
+    if isinstance(v, IPv4Network):
+        return v
+
+    try:
+        return IPv4Network(v)
+    except ValueError:
+        raise errors.IPv4NetworkError()
+
+
+def ip_v6_network_validator(v: Any) -> IPv6Network:
+    """
+    Assume IPv6Network is initialised with the default ``strict`` argument
+
+    See more:
+    https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network
+    """
+    if isinstance(v, IPv6Network):
+        return v
+
+    try:
+        return IPv6Network(v)
+    except ValueError:
+        raise errors.IPv6NetworkError()
+
+
+def ip_v4_interface_validator(v: Any) -> IPv4Interface:
+    if isinstance(v, IPv4Interface):
+        return v
+
+    try:
+        return IPv4Interface(v)
+    except ValueError:
+        raise errors.IPv4InterfaceError()
+
+
+def ip_v6_interface_validator(v: Any) -> IPv6Interface:
+    if isinstance(v, IPv6Interface):
+        return v
+
+    try:
+        return IPv6Interface(v)
+    except ValueError:
+        raise errors.IPv6InterfaceError()
+
+
+def path_validator(v: Any) -> Path:
+    if isinstance(v, Path):
+        return v
+
+    try:
+        return Path(v)
+    except TypeError:
+        raise errors.PathError()
+
+
+def path_exists_validator(v: Any) -> Path:
+    if not v.exists():
+        raise errors.PathNotExistsError(path=v)
+
+    return v
+
+
+def callable_validator(v: Any) -> AnyCallable:
+    """
+    Perform a simple check if the value is callable.
+
+    Note: complete matching of argument type hints and return types is not performed
+    """
+    if callable(v):
+        return v
+
+    raise errors.CallableError(value=v)
+
+
+def enum_validator(v: Any) -> Enum:
+    if isinstance(v, Enum):
+        return v
+
+    raise errors.EnumError(value=v)
+
+
+def int_enum_validator(v: Any) -> IntEnum:
+    if isinstance(v, IntEnum):
+        return v
+
+    raise errors.IntEnumError(value=v)
+
+
+def make_literal_validator(type_: Any) -> Callable[[Any], Any]:
+    permitted_choices = all_literal_values(type_)
+
+    # To get O(1) lookups and still return one of the values set inside the `Literal`,
+    # we create a dict with the set values (a set causes some problems with the way intersection works).
+    # In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`)
+    allowed_choices = {v: v for v in permitted_choices}
+
+    def literal_validator(v: Any) -> Any:
+        try:
+            return allowed_choices[v]
+        except (KeyError, TypeError):
+            raise errors.WrongConstantError(given=v, permitted=permitted_choices)
+
+    return literal_validator
+
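Illustrative behaviour of the generated literal validator on a model field; the ``Open`` model is made up for illustration:

    from typing import Literal

    from pydantic.v1 import BaseModel, ValidationError

    class Open(BaseModel):
        mode: Literal['r', 'w']

    assert Open(mode='r').mode == 'r'
    try:
        Open(mode='x')                     # not among the permitted Literal values
    except ValidationError:
        pass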
+
+def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
+    v_len = len(v)
+
+    min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length
+    if v_len < min_length:
+        raise errors.AnyStrMinLengthError(limit_value=min_length)
+
+    max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length
+    if max_length is not None and v_len > max_length:
+        raise errors.AnyStrMaxLengthError(limit_value=max_length)
+
+    return v
+
+
+def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
+    strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace
+    if strip_whitespace:
+        v = v.strip()
+
+    return v
+
+
+def constr_upper(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
+    upper = field.type_.to_upper or config.anystr_upper
+    if upper:
+        v = v.upper()
+
+    return v
+
+
+def constr_lower(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
+    lower = field.type_.to_lower or config.anystr_lower
+    if lower:
+        v = v.lower()
+    return v
+
+
+def validate_json(v: Any, config: 'BaseConfig') -> Any:
+    if v is None:
+        # pass None through to other validators
+        return v
+    try:
+        return config.json_loads(v)  # type: ignore
+    except ValueError:
+        raise errors.JsonError()
+    except TypeError:
+        raise errors.JsonTypeError()
+
+
+T = TypeVar('T')
+
+
+def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]:
+    def arbitrary_type_validator(v: Any) -> T:
+        if isinstance(v, type_):
+            return v
+        raise errors.ArbitraryTypeError(expected_arbitrary_type=type_)
+
+    return arbitrary_type_validator
+
+
+def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]:
+    def class_validator(v: Any) -> Type[T]:
+        if lenient_issubclass(v, type_):
+            return v
+        raise errors.SubclassError(expected_class=type_)
+
+    return class_validator
+
+
+def any_class_validator(v: Any) -> Type[T]:
+    if isinstance(v, type):
+        return v
+    raise errors.ClassError()
+
+
+def none_validator(v: Any) -> 'Literal[None]':
+    if v is None:
+        return v
+    raise errors.NotNoneError()
+
+
+def pattern_validator(v: Any) -> Pattern[str]:
+    if isinstance(v, Pattern):
+        return v
+
+    str_value = str_validator(v)
+
+    try:
+        return re.compile(str_value)
+    except re.error:
+        raise errors.PatternError()
+
+
+NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple)
+
+
+def make_namedtuple_validator(
+    namedtuple_cls: Type[NamedTupleT], config: Type['BaseConfig']
+) -> Callable[[Tuple[Any, ...]], NamedTupleT]:
+    from pydantic.v1.annotated_types import create_model_from_namedtuple
+
+    NamedTupleModel = create_model_from_namedtuple(
+        namedtuple_cls,
+        __config__=config,
+        __module__=namedtuple_cls.__module__,
+    )
+    namedtuple_cls.__pydantic_model__ = NamedTupleModel  # type: ignore[attr-defined]
+
+    def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT:
+        annotations = NamedTupleModel.__annotations__
+
+        if len(values) > len(annotations):
+            raise errors.ListMaxLengthError(limit_value=len(annotations))
+
+        dict_values: Dict[str, Any] = dict(zip(annotations, values))
+        validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values))
+        return namedtuple_cls(**validated_dict_values)
+
+    return namedtuple_validator
+
+
+def make_typeddict_validator(
+    typeddict_cls: Type['TypedDict'], config: Type['BaseConfig']  # type: ignore[valid-type]
+) -> Callable[[Any], Dict[str, Any]]:
+    from pydantic.v1.annotated_types import create_model_from_typeddict
+
+    TypedDictModel = create_model_from_typeddict(
+        typeddict_cls,
+        __config__=config,
+        __module__=typeddict_cls.__module__,
+    )
+    typeddict_cls.__pydantic_model__ = TypedDictModel  # type: ignore[attr-defined]
+
+    def typeddict_validator(values: 'TypedDict') -> Dict[str, Any]:  # type: ignore[valid-type]
+        return TypedDictModel.parse_obj(values).dict(exclude_unset=True)
+
+    return typeddict_validator
+
+
+class IfConfig:
+    def __init__(self, validator: AnyCallable, *config_attr_names: str, ignored_value: Any = False) -> None:
+        self.validator = validator
+        self.config_attr_names = config_attr_names
+        self.ignored_value = ignored_value
+
+    def check(self, config: Type['BaseConfig']) -> bool:
+        return any(getattr(config, name) not in {None, self.ignored_value} for name in self.config_attr_names)
+
+
+# order is important here, for example: bool is a subclass of int so it has to come first; likewise datetime before
+# date, IPv4Interface before IPv4Address, etc.
+_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [
+    (IntEnum, [int_validator, enum_member_validator]),
+    (Enum, [enum_member_validator]),
+    (
+        str,
+        [
+            str_validator,
+            IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'),
+            IfConfig(anystr_upper, 'anystr_upper'),
+            IfConfig(anystr_lower, 'anystr_lower'),
+            IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'),
+        ],
+    ),
+    (
+        bytes,
+        [
+            bytes_validator,
+            IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'),
+            IfConfig(anystr_upper, 'anystr_upper'),
+            IfConfig(anystr_lower, 'anystr_lower'),
+            IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'),
+        ],
+    ),
+    (bool, [bool_validator]),
+    (int, [int_validator]),
+    (float, [float_validator, IfConfig(float_finite_validator, 'allow_inf_nan', ignored_value=True)]),
+    (Path, [path_validator]),
+    (datetime, [parse_datetime]),
+    (date, [parse_date]),
+    (time, [parse_time]),
+    (timedelta, [parse_duration]),
+    (OrderedDict, [ordered_dict_validator]),
+    (dict, [dict_validator]),
+    (list, [list_validator]),
+    (tuple, [tuple_validator]),
+    (set, [set_validator]),
+    (frozenset, [frozenset_validator]),
+    (deque, [deque_validator]),
+    (UUID, [uuid_validator]),
+    (Decimal, [decimal_validator]),
+    (IPv4Interface, [ip_v4_interface_validator]),
+    (IPv6Interface, [ip_v6_interface_validator]),
+    (IPv4Address, [ip_v4_address_validator]),
+    (IPv6Address, [ip_v6_address_validator]),
+    (IPv4Network, [ip_v4_network_validator]),
+    (IPv6Network, [ip_v6_network_validator]),
+]
+
+
+def find_validators(  # noqa: C901 (ignore complexity)
+    type_: Type[Any], config: Type['BaseConfig']
+) -> Generator[AnyCallable, None, None]:
+    from pydantic.v1.dataclasses import is_builtin_dataclass, make_dataclass_validator
+
+    if type_ is Any or type_ is object:
+        return
+    type_type = type_.__class__
+    if type_type == ForwardRef or type_type == TypeVar:
+        return
+
+    if is_none_type(type_):
+        yield none_validator
+        return
+    if type_ is Pattern or type_ is re.Pattern:
+        yield pattern_validator
+        return
+    if type_ is Hashable or type_ is CollectionsHashable:
+        yield hashable_validator
+        return
+    if is_callable_type(type_):
+        yield callable_validator
+        return
+    if is_literal_type(type_):
+        yield make_literal_validator(type_)
+        return
+    if is_builtin_dataclass(type_):
+        yield from make_dataclass_validator(type_, config)
+        return
+    if type_ is Enum:
+        yield enum_validator
+        return
+    if type_ is IntEnum:
+        yield int_enum_validator
+        return
+    if is_namedtuple(type_):
+        yield tuple_validator
+        yield make_namedtuple_validator(type_, config)
+        return
+    if is_typeddict(type_):
+        yield make_typeddict_validator(type_, config)
+        return
+
+    class_ = get_class(type_)
+    if class_ is not None:
+        if class_ is not Any and isinstance(class_, type):
+            yield make_class_validator(class_)
+        else:
+            yield any_class_validator
+        return
+
+    for val_type, validators in _VALIDATORS:
+        try:
+            if issubclass(type_, val_type):
+                for v in validators:
+                    if isinstance(v, IfConfig):
+                        if v.check(config):
+                            yield v.validator
+                    else:
+                        yield v
+                return
+        except TypeError:
+            raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})')
+
+    if config.arbitrary_types_allowed:
+        yield make_arbitrary_type_validator(type_)
+    else:
+        if hasattr(type_, '__pydantic_core_schema__'):
+            warn(f'Mixing V1 and V2 models is not supported. `{type_.__name__}` is a V2 model.', UserWarning)
+        raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config')
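
The final branch above is what surfaces as the familiar ``arbitrary_types_allowed`` behaviour; a small sketch, with the ``Custom`` class made up for illustration:

    from pydantic.v1 import BaseModel

    class Custom:
        pass

    class Model(BaseModel):
        value: Custom

        class Config:
            arbitrary_types_allowed = True   # without this, model creation raises RuntimeError

    assert isinstance(Model(value=Custom()).value, Custom)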
diff --git a/.venv/lib/python3.12/site-packages/pydantic/v1/version.py b/.venv/lib/python3.12/site-packages/pydantic/v1/version.py
new file mode 100644
index 00000000..47be0b75
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/pydantic/v1/version.py
@@ -0,0 +1,38 @@
+__all__ = 'compiled', 'VERSION', 'version_info'
+
+VERSION = '1.10.19'
+
+try:
+    import cython  # type: ignore
+except ImportError:
+    compiled: bool = False
+else:  # pragma: no cover
+    try:
+        compiled = cython.compiled
+    except AttributeError:
+        compiled = False
+
+
+def version_info() -> str:
+    import platform
+    import sys
+    from importlib import import_module
+    from pathlib import Path
+
+    optional_deps = []
+    for p in ('devtools', 'dotenv', 'email-validator', 'typing-extensions'):
+        try:
+            import_module(p.replace('-', '_'))
+        except ImportError:
+            continue
+        optional_deps.append(p)
+
+    info = {
+        'pydantic version': VERSION,
+        'pydantic compiled': compiled,
+        'install path': Path(__file__).resolve().parent,
+        'python version': sys.version,
+        'platform': platform.platform(),
+        'optional deps. installed': optional_deps,
+    }
+    return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items())
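
A quick usage sketch of the module above:

    from pydantic.v1.version import VERSION, version_info

    print(VERSION)           # '1.10.19'
    print(version_info())    # multi-line, right-aligned summary of the environment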