path: root/.venv/lib/python3.12/site-packages/attr
Diffstat (limited to '.venv/lib/python3.12/site-packages/attr')
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/__init__.py  104
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/__init__.pyi  389
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_cmp.py  160
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_cmp.pyi  13
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_compat.py  94
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_config.py  31
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_funcs.py  468
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_make.py  3123
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_next_gen.py  623
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_typing_compat.pyi  15
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_version_info.py  86
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/_version_info.pyi  9
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/converters.py  162
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/converters.pyi  19
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/exceptions.py  95
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/exceptions.pyi  17
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/filters.py  72
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/filters.pyi  6
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/py.typed  0
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/setters.py  79
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/setters.pyi  20
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/validators.py  710
-rw-r--r--  .venv/lib/python3.12/site-packages/attr/validators.pyi  86
23 files changed, 6381 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/attr/__init__.py b/.venv/lib/python3.12/site-packages/attr/__init__.py
new file mode 100644
index 00000000..5c6e0650
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/__init__.py
@@ -0,0 +1,104 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Classes Without Boilerplate
+"""
+
+from functools import partial
+from typing import Callable, Literal, Protocol
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, has, resolve_types
+from ._make import (
+    NOTHING,
+    Attribute,
+    Converter,
+    Factory,
+    _Nothing,
+    attrib,
+    attrs,
+    evolve,
+    fields,
+    fields_dict,
+    make_class,
+    validate,
+)
+from ._next_gen import define, field, frozen, mutable
+from ._version_info import VersionInfo
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)
+
+
+class AttrsInstance(Protocol):
+    pass
+
+
+NothingType = Literal[_Nothing.NOTHING]
+
+__all__ = [
+    "NOTHING",
+    "Attribute",
+    "AttrsInstance",
+    "Converter",
+    "Factory",
+    "NothingType",
+    "asdict",
+    "assoc",
+    "astuple",
+    "attr",
+    "attrib",
+    "attributes",
+    "attrs",
+    "cmp_using",
+    "converters",
+    "define",
+    "evolve",
+    "exceptions",
+    "field",
+    "fields",
+    "fields_dict",
+    "filters",
+    "frozen",
+    "get_run_validators",
+    "has",
+    "ib",
+    "make_class",
+    "mutable",
+    "resolve_types",
+    "s",
+    "set_run_validators",
+    "setters",
+    "validate",
+    "validators",
+]
+
+
+def _make_getattr(mod_name: str) -> Callable:
+    """
+    Create a metadata proxy for packaging information that uses *mod_name* in
+    its warnings and errors.
+    """
+
+    def __getattr__(name: str) -> str:
+        if name not in ("__version__", "__version_info__"):
+            msg = f"module {mod_name} has no attribute {name}"
+            raise AttributeError(msg)
+
+        from importlib.metadata import metadata
+
+        meta = metadata("attrs")
+
+        if name == "__version_info__":
+            return VersionInfo._from_version_string(meta["version"])
+
+        return meta["version"]
+
+    return __getattr__
+
+
+__getattr__ = _make_getattr(__name__)
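
For orientation, the classic namespace assembled above can be exercised as follows; this is an illustrative sketch of the re-exported API (`attr.s`, `attr.ib`, the `dataclass` partial, and the lazy version metadata), not part of the packaged file:

import attr

@attr.s
class Point:
    x = attr.ib(default=0)
    y = attr.ib(default=0)

# attr.dataclass is partial(attrs, auto_attribs=True), so annotated fields become attributes.
@attr.dataclass
class Size:
    width: int = 0
    height: int = 0

# __version__ and __version_info__ are resolved lazily through the module-level __getattr__.
print(attr.__version__, attr.__version_info__)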
diff --git a/.venv/lib/python3.12/site-packages/attr/__init__.pyi b/.venv/lib/python3.12/site-packages/attr/__init__.pyi
new file mode 100644
index 00000000..133e5010
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/__init__.pyi
@@ -0,0 +1,389 @@
+import enum
+import sys
+
+from typing import (
+    Any,
+    Callable,
+    Generic,
+    Literal,
+    Mapping,
+    Protocol,
+    Sequence,
+    TypeVar,
+    overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._cmp import cmp_using as cmp_using
+from ._typing_compat import AttrsInstance_
+from ._version_info import VersionInfo
+from attrs import (
+    define as define,
+    field as field,
+    mutable as mutable,
+    frozen as frozen,
+    _EqOrderType,
+    _ValidatorType,
+    _ConverterType,
+    _ReprArgType,
+    _OnSetAttrType,
+    _OnSetAttrArgType,
+    _FieldTransformer,
+    _ValidatorArgType,
+)
+
+if sys.version_info >= (3, 10):
+    from typing import TypeGuard, TypeAlias
+else:
+    from typing_extensions import TypeGuard, TypeAlias
+
+if sys.version_info >= (3, 11):
+    from typing import dataclass_transform
+else:
+    from typing_extensions import dataclass_transform
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_FilterType = Callable[["Attribute[_T]", _T], bool]
+
+# We subclass this here to keep the protocol's qualified name clean.
+class AttrsInstance(AttrsInstance_, Protocol):
+    pass
+
+_A = TypeVar("_A", bound=type[AttrsInstance])
+
+class _Nothing(enum.Enum):
+    NOTHING = enum.auto()
+
+NOTHING = _Nothing.NOTHING
+NothingType: TypeAlias = Literal[_Nothing.NOTHING]
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+
+@overload
+def Factory(factory: Callable[[], _T]) -> _T: ...
+@overload
+def Factory(
+    factory: Callable[[Any], _T],
+    takes_self: Literal[True],
+) -> _T: ...
+@overload
+def Factory(
+    factory: Callable[[], _T],
+    takes_self: Literal[False],
+) -> _T: ...
+
+In = TypeVar("In")
+Out = TypeVar("Out")
+
+class Converter(Generic[In, Out]):
+    @overload
+    def __init__(self, converter: Callable[[In], Out]) -> None: ...
+    @overload
+    def __init__(
+        self,
+        converter: Callable[[In, AttrsInstance, Attribute], Out],
+        *,
+        takes_self: Literal[True],
+        takes_field: Literal[True],
+    ) -> None: ...
+    @overload
+    def __init__(
+        self,
+        converter: Callable[[In, Attribute], Out],
+        *,
+        takes_field: Literal[True],
+    ) -> None: ...
+    @overload
+    def __init__(
+        self,
+        converter: Callable[[In, AttrsInstance], Out],
+        *,
+        takes_self: Literal[True],
+    ) -> None: ...
+
+class Attribute(Generic[_T]):
+    name: str
+    default: _T | None
+    validator: _ValidatorType[_T] | None
+    repr: _ReprArgType
+    cmp: _EqOrderType
+    eq: _EqOrderType
+    order: _EqOrderType
+    hash: bool | None
+    init: bool
+    converter: Converter | None
+    metadata: dict[Any, Any]
+    type: type[_T] | None
+    kw_only: bool
+    on_setattr: _OnSetAttrType
+    alias: str | None
+
+    def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+#   - Pros: Handles simple cases correctly
+#   - Cons: Might produce less informative errors in the case of conflicting
+#     TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+#   - Pros: Better error messages than #1 for conflicting TypeVars
+#   - Cons: Terrible error messages for validator checks.
+#   e.g. attr.ib(type=int, validator=validate_str)
+#        -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+#   - Pros: Simple here, and we could customize the plugin with our own errors.
+#   - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+#     attr()    -> Any
+#     attr(8)   -> int
+#     attr(validator=<some callable>)  -> Whatever the callable expects.
+# This makes this type of assignment possible:
+#     x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+    default: None = ...,
+    validator: None = ...,
+    repr: _ReprArgType = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    metadata: Mapping[Any, Any] | None = ...,
+    type: None = ...,
+    converter: None = ...,
+    factory: None = ...,
+    kw_only: bool = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    alias: str | None = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+    default: None = ...,
+    validator: _ValidatorArgType[_T] | None = ...,
+    repr: _ReprArgType = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    metadata: Mapping[Any, Any] | None = ...,
+    type: type[_T] | None = ...,
+    converter: _ConverterType
+    | list[_ConverterType]
+    | tuple[_ConverterType]
+    | None = ...,
+    factory: Callable[[], _T] | None = ...,
+    kw_only: bool = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    alias: str | None = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+    default: _T,
+    validator: _ValidatorArgType[_T] | None = ...,
+    repr: _ReprArgType = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    metadata: Mapping[Any, Any] | None = ...,
+    type: type[_T] | None = ...,
+    converter: _ConverterType
+    | list[_ConverterType]
+    | tuple[_ConverterType]
+    | None = ...,
+    factory: Callable[[], _T] | None = ...,
+    kw_only: bool = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    alias: str | None = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+    default: _T | None = ...,
+    validator: _ValidatorArgType[_T] | None = ...,
+    repr: _ReprArgType = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    metadata: Mapping[Any, Any] | None = ...,
+    type: object = ...,
+    converter: _ConverterType
+    | list[_ConverterType]
+    | tuple[_ConverterType]
+    | None = ...,
+    factory: Callable[[], _T] | None = ...,
+    kw_only: bool = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    alias: str | None = ...,
+) -> Any: ...
+@overload
+@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
+def attrs(
+    maybe_cls: _C,
+    these: dict[str, Any] | None = ...,
+    repr_ns: str | None = ...,
+    repr: bool = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    auto_detect: bool = ...,
+    collect_by_mro: bool = ...,
+    getstate_setstate: bool | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    field_transformer: _FieldTransformer | None = ...,
+    match_args: bool = ...,
+    unsafe_hash: bool | None = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
+def attrs(
+    maybe_cls: None = ...,
+    these: dict[str, Any] | None = ...,
+    repr_ns: str | None = ...,
+    repr: bool = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    auto_detect: bool = ...,
+    collect_by_mro: bool = ...,
+    getstate_setstate: bool | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    field_transformer: _FieldTransformer | None = ...,
+    match_args: bool = ...,
+    unsafe_hash: bool | None = ...,
+) -> Callable[[_C], _C]: ...
+def fields(cls: type[AttrsInstance]) -> Any: ...
+def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...
+def validate(inst: AttrsInstance) -> None: ...
+def resolve_types(
+    cls: _A,
+    globalns: dict[str, Any] | None = ...,
+    localns: dict[str, Any] | None = ...,
+    attribs: list[Attribute[Any]] | None = ...,
+    include_extras: bool = ...,
+) -> _A: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+    name: str,
+    attrs: list[str] | tuple[str, ...] | dict[str, Any],
+    bases: tuple[type, ...] = ...,
+    class_body: dict[str, Any] | None = ...,
+    repr_ns: str | None = ...,
+    repr: bool = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    collect_by_mro: bool = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    field_transformer: _FieldTransformer | None = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
+def asdict(
+    inst: AttrsInstance,
+    recurse: bool = ...,
+    filter: _FilterType[Any] | None = ...,
+    dict_factory: type[Mapping[Any, Any]] = ...,
+    retain_collection_types: bool = ...,
+    value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,
+    tuple_keys: bool | None = ...,
+) -> dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+    inst: AttrsInstance,
+    recurse: bool = ...,
+    filter: _FilterType[Any] | None = ...,
+    tuple_factory: type[Sequence[Any]] = ...,
+    retain_collection_types: bool = ...,
+) -> tuple[Any, ...]: ...
+def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
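
As the NOTE in this stub explains, `Factory` deliberately reports its argument's result type so that annotated defaults type-check; a minimal sketch of what the overloads above permit (assuming a type checker that consumes these stubs):

import attr

@attr.s(auto_attribs=True)
class Config:
    # Factory(list) is typed as _T, so the list[str] annotation is accepted.
    tags: list[str] = attr.Factory(list)
    # With takes_self=True the factory receives the partially initialized instance.
    summary: str = attr.Factory(lambda self: ",".join(self.tags), takes_self=True)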
diff --git a/.venv/lib/python3.12/site-packages/attr/_cmp.py b/.venv/lib/python3.12/site-packages/attr/_cmp.py
new file mode 100644
index 00000000..09bab491
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_cmp.py
@@ -0,0 +1,160 @@
+# SPDX-License-Identifier: MIT
+
+
+import functools
+import types
+
+from ._make import __ne__
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+    eq=None,
+    lt=None,
+    le=None,
+    gt=None,
+    ge=None,
+    require_same_type=True,
+    class_name="Comparable",
+):
+    """
+    Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
+    and ``cmp`` arguments to customize field comparison.
+
+    The resulting class will have a full set of ordering methods if ``eq`` and
+    at least one of ``{lt, le, gt, ge}`` are provided.
+
+    Args:
+        eq (typing.Callable | None):
+            Callable used to evaluate equality of two objects.
+
+        lt (typing.Callable | None):
+            Callable used to evaluate whether one object is less than another
+            object.
+
+        le (typing.Callable | None):
+            Callable used to evaluate whether one object is less than or equal
+            to another object.
+
+        gt (typing.Callable | None):
+            Callable used to evaluate whether one object is greater than
+            another object.
+
+        ge (typing.Callable | None):
+            Callable used to evaluate whether one object is greater than or
+            equal to another object.
+
+        require_same_type (bool):
+            When `True`, equality and ordering methods will return
+            `NotImplemented` if objects are not of the same type.
+
+        class_name (str | None): Name of class. Defaults to "Comparable".
+
+    See `comparison` for more details.
+
+    .. versionadded:: 21.1.0
+    """
+
+    body = {
+        "__slots__": ["value"],
+        "__init__": _make_init(),
+        "_requirements": [],
+        "_is_comparable_to": _is_comparable_to,
+    }
+
+    # Add operations.
+    num_order_functions = 0
+    has_eq_function = False
+
+    if eq is not None:
+        has_eq_function = True
+        body["__eq__"] = _make_operator("eq", eq)
+        body["__ne__"] = __ne__
+
+    if lt is not None:
+        num_order_functions += 1
+        body["__lt__"] = _make_operator("lt", lt)
+
+    if le is not None:
+        num_order_functions += 1
+        body["__le__"] = _make_operator("le", le)
+
+    if gt is not None:
+        num_order_functions += 1
+        body["__gt__"] = _make_operator("gt", gt)
+
+    if ge is not None:
+        num_order_functions += 1
+        body["__ge__"] = _make_operator("ge", ge)
+
+    type_ = types.new_class(
+        class_name, (object,), {}, lambda ns: ns.update(body)
+    )
+
+    # Add same type requirement.
+    if require_same_type:
+        type_._requirements.append(_check_same_type)
+
+    # Add total ordering if at least one operation was defined.
+    if 0 < num_order_functions < 4:
+        if not has_eq_function:
+            # functools.total_ordering requires __eq__ to be defined,
+            # so raise an early error here to keep a nice stack trace.
+            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
+            raise ValueError(msg)
+        type_ = functools.total_ordering(type_)
+
+    return type_
+
+
+def _make_init():
+    """
+    Create __init__ method.
+    """
+
+    def __init__(self, value):
+        """
+        Initialize object with *value*.
+        """
+        self.value = value
+
+    return __init__
+
+
+def _make_operator(name, func):
+    """
+    Create operator method.
+    """
+
+    def method(self, other):
+        if not self._is_comparable_to(other):
+            return NotImplemented
+
+        result = func(self.value, other.value)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return result
+
+    method.__name__ = f"__{name}__"
+    method.__doc__ = (
+        f"Return a {_operation_names[name]} b.  Computed by attrs."
+    )
+
+    return method
+
+
+def _is_comparable_to(self, other):
+    """
+    Check whether `other` is comparable to `self`.
+    """
+    return all(func(self, other) for func in self._requirements)
+
+
+def _check_same_type(self, other):
+    """
+    Return True if *self* and *other* are of the same type, False otherwise.
+    """
+    return other.value.__class__ is self.value.__class__
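
A brief usage sketch of `cmp_using`: the generated class can be used on its own or passed to a field's `eq`/`order` arguments; the tolerance-based comparison callables below are invented for illustration:

import attr
from attr import cmp_using

# Standalone: instances wrap a value and compare it with the supplied callables.
ApproxFloat = cmp_using(
    eq=lambda a, b: abs(a - b) < 1e-9,
    lt=lambda a, b: a < b,
    class_name="ApproxFloat",
)
assert ApproxFloat(1.0) == ApproxFloat(1.0 + 1e-12)
assert ApproxFloat(1.0) < ApproxFloat(2.0)

# As a field comparator: attrs wraps the field value in this class when comparing instances.
@attr.s(auto_attribs=True)
class Measurement:
    value: float = attr.ib(eq=cmp_using(eq=lambda a, b: abs(a - b) < 1e-9))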
diff --git a/.venv/lib/python3.12/site-packages/attr/_cmp.pyi b/.venv/lib/python3.12/site-packages/attr/_cmp.pyi
new file mode 100644
index 00000000..cc7893b0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_cmp.pyi
@@ -0,0 +1,13 @@
+from typing import Any, Callable
+
+_CompareWithType = Callable[[Any, Any], bool]
+
+def cmp_using(
+    eq: _CompareWithType | None = ...,
+    lt: _CompareWithType | None = ...,
+    le: _CompareWithType | None = ...,
+    gt: _CompareWithType | None = ...,
+    ge: _CompareWithType | None = ...,
+    require_same_type: bool = ...,
+    class_name: str = ...,
+) -> type: ...
diff --git a/.venv/lib/python3.12/site-packages/attr/_compat.py b/.venv/lib/python3.12/site-packages/attr/_compat.py
new file mode 100644
index 00000000..22fcd783
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_compat.py
@@ -0,0 +1,94 @@
+# SPDX-License-Identifier: MIT
+
+import inspect
+import platform
+import sys
+import threading
+
+from collections.abc import Mapping, Sequence  # noqa: F401
+from typing import _GenericAlias
+
+
+PYPY = platform.python_implementation() == "PyPy"
+PY_3_9_PLUS = sys.version_info[:2] >= (3, 9)
+PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
+PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
+PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
+PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
+PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
+
+
+if PY_3_14_PLUS:  # pragma: no cover
+    import annotationlib
+
+    _get_annotations = annotationlib.get_annotations
+
+else:
+
+    def _get_annotations(cls):
+        """
+        Get annotations for *cls*.
+        """
+        return cls.__dict__.get("__annotations__", {})
+
+
+class _AnnotationExtractor:
+    """
+    Extract type annotations from a callable, returning None whenever there
+    is none.
+    """
+
+    __slots__ = ["sig"]
+
+    def __init__(self, callable):
+        try:
+            self.sig = inspect.signature(callable)
+        except (ValueError, TypeError):  # inspect failed
+            self.sig = None
+
+    def get_first_param_type(self):
+        """
+        Return the type annotation of the first argument if it's not empty.
+        """
+        if not self.sig:
+            return None
+
+        params = list(self.sig.parameters.values())
+        if params and params[0].annotation is not inspect.Parameter.empty:
+            return params[0].annotation
+
+        return None
+
+    def get_return_type(self):
+        """
+        Return the return type if it's not empty.
+        """
+        if (
+            self.sig
+            and self.sig.return_annotation is not inspect.Signature.empty
+        ):
+            return self.sig.return_annotation
+
+        return None
+
+
+# Thread-local global to track attrs instances which are already being repr'd.
+# This is needed because there is no other (thread-safe) way to pass info
+# about the instances that are already being repr'd through the call stack
+# in order to ensure we don't perform infinite recursion.
+#
+# For instance, if an instance contains a dict which contains that instance,
+# we need to know that we're already repr'ing the outside instance from within
+# the dict's repr() call.
+#
+# This lives here rather than in _make.py so that the functions in _make.py
+# don't have a direct reference to the thread-local in their globals dict.
+# If they have such a reference, it breaks cloudpickle.
+repr_context = threading.local()
+
+
+def get_generic_base(cl):
+    """If this is a generic class (A[str]), return the generic base for it."""
+    if cl.__class__ is _GenericAlias:
+        return cl.__origin__
+    return None
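
`_AnnotationExtractor` is the internal helper attrs uses to infer types from converters and default factories; a minimal sketch of its behavior, using the private import path of this vendored copy:

from attr._compat import _AnnotationExtractor

def to_port(value: str) -> int:
    return int(value)

ext = _AnnotationExtractor(to_port)
assert ext.get_first_param_type() is str  # annotation of the first parameter
assert ext.get_return_type() is int       # return annotation

# Unannotated callables (or ones inspect.signature() cannot handle) simply yield None.
assert _AnnotationExtractor(len).get_return_type() is None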
diff --git a/.venv/lib/python3.12/site-packages/attr/_config.py b/.venv/lib/python3.12/site-packages/attr/_config.py
new file mode 100644
index 00000000..4b257726
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_config.py
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: MIT
+
+__all__ = ["get_run_validators", "set_run_validators"]
+
+_run_validators = True
+
+
+def set_run_validators(run):
+    """
+    Set whether or not validators are run.  By default, they are run.
+
+    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+        moved to the new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
+        instead.
+    """
+    if not isinstance(run, bool):
+        msg = "'run' must be bool."
+        raise TypeError(msg)
+    global _run_validators
+    _run_validators = run
+
+
+def get_run_validators():
+    """
+    Return whether or not validators are run.
+
+    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+        moved to the new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
+        instead.
+    """
+    return _run_validators
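
These two functions toggle the module-global `_run_validators` flag; a short usage sketch (the docstrings point to `attrs.validators.set_disabled`/`get_disabled` as the preferred modern spelling):

import attr

assert attr.get_run_validators() is True  # validators run by default

attr.set_run_validators(False)  # globally disable validator execution
try:
    pass  # construct instances here without running validators
finally:
    attr.set_run_validators(True)  # re-enable for the rest of the process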
diff --git a/.venv/lib/python3.12/site-packages/attr/_funcs.py b/.venv/lib/python3.12/site-packages/attr/_funcs.py
new file mode 100644
index 00000000..c39fb8aa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_funcs.py
@@ -0,0 +1,468 @@
+# SPDX-License-Identifier: MIT
+
+
+import copy
+
+from ._compat import PY_3_9_PLUS, get_generic_base
+from ._make import _OBJ_SETATTR, NOTHING, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+def asdict(
+    inst,
+    recurse=True,
+    filter=None,
+    dict_factory=dict,
+    retain_collection_types=False,
+    value_serializer=None,
+):
+    """
+    Return the *attrs* attribute values of *inst* as a dict.
+
+    Optionally recurse into other *attrs*-decorated classes.
+
+    Args:
+        inst: Instance of an *attrs*-decorated class.
+
+        recurse (bool): Recurse into classes that are also *attrs*-decorated.
+
+        filter (~typing.Callable):
+            A callable whose return value determines whether an attribute or
+            element is included (`True`) or dropped (`False`). It is called with
+            the `attrs.Attribute` as the first argument and the value as the
+            second argument.
+
+        dict_factory (~typing.Callable):
+            A callable to produce dictionaries from.  For example, to produce
+            ordered dictionaries instead of normal Python dictionaries, pass in
+            ``collections.OrderedDict``.
+
+        retain_collection_types (bool):
+            Do not convert to `list` when encountering an attribute whose type
+            is `tuple` or `set`.  Only meaningful if *recurse* is `True`.
+
+        value_serializer (typing.Callable | None):
+            A hook that is called for every attribute or dict key/value.  It
+            receives the current instance, field and value and must return the
+            (updated) value.  The hook is run *after* the optional *filter* has
+            been applied.
+
+    Returns:
+        Return type of *dict_factory*.
+
+    Raises:
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    ..  versionadded:: 16.0.0 *dict_factory*
+    ..  versionadded:: 16.1.0 *retain_collection_types*
+    ..  versionadded:: 20.3.0 *value_serializer*
+    ..  versionadded:: 21.3.0
+        If a dict has a collection for a key, it is serialized as a tuple.
+    """
+    attrs = fields(inst.__class__)
+    rv = dict_factory()
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+
+        if value_serializer is not None:
+            v = value_serializer(inst, a, v)
+
+        if recurse is True:
+            if has(v.__class__):
+                rv[a.name] = asdict(
+                    v,
+                    recurse=True,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain_collection_types is True else list
+                items = [
+                    _asdict_anything(
+                        i,
+                        is_key=False,
+                        filter=filter,
+                        dict_factory=dict_factory,
+                        retain_collection_types=retain_collection_types,
+                        value_serializer=value_serializer,
+                    )
+                    for i in v
+                ]
+                try:
+                    rv[a.name] = cf(items)
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears, for example, for a namedtuple)
+                    rv[a.name] = cf(*items)
+            elif isinstance(v, dict):
+                df = dict_factory
+                rv[a.name] = df(
+                    (
+                        _asdict_anything(
+                            kk,
+                            is_key=True,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                        _asdict_anything(
+                            vv,
+                            is_key=False,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                    )
+                    for kk, vv in v.items()
+                )
+            else:
+                rv[a.name] = v
+        else:
+            rv[a.name] = v
+    return rv
+
+
+def _asdict_anything(
+    val,
+    is_key,
+    filter,
+    dict_factory,
+    retain_collection_types,
+    value_serializer,
+):
+    """
+    ``asdict`` only works on attrs instances, this works on anything.
+    """
+    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+        # Attrs class.
+        rv = asdict(
+            val,
+            recurse=True,
+            filter=filter,
+            dict_factory=dict_factory,
+            retain_collection_types=retain_collection_types,
+            value_serializer=value_serializer,
+        )
+    elif isinstance(val, (tuple, list, set, frozenset)):
+        if retain_collection_types is True:
+            cf = val.__class__
+        elif is_key:
+            cf = tuple
+        else:
+            cf = list
+
+        rv = cf(
+            [
+                _asdict_anything(
+                    i,
+                    is_key=False,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+                for i in val
+            ]
+        )
+    elif isinstance(val, dict):
+        df = dict_factory
+        rv = df(
+            (
+                _asdict_anything(
+                    kk,
+                    is_key=True,
+                    filter=filter,
+                    dict_factory=df,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                ),
+                _asdict_anything(
+                    vv,
+                    is_key=False,
+                    filter=filter,
+                    dict_factory=df,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                ),
+            )
+            for kk, vv in val.items()
+        )
+    else:
+        rv = val
+        if value_serializer is not None:
+            rv = value_serializer(None, None, rv)
+
+    return rv
+
+
+def astuple(
+    inst,
+    recurse=True,
+    filter=None,
+    tuple_factory=tuple,
+    retain_collection_types=False,
+):
+    """
+    Return the *attrs* attribute values of *inst* as a tuple.
+
+    Optionally recurse into other *attrs*-decorated classes.
+
+    Args:
+        inst: Instance of an *attrs*-decorated class.
+
+        recurse (bool):
+            Recurse into classes that are also *attrs*-decorated.
+
+        filter (~typing.Callable):
+            A callable whose return value determines whether an attribute or
+            element is included (`True`) or dropped (`False`). It is called with
+            the `attrs.Attribute` as the first argument and the value as the
+            second argument.
+
+        tuple_factory (~typing.Callable):
+            A callable to produce tuples from. For example, pass ``list`` to
+            produce lists instead of tuples.
+
+        retain_collection_types (bool):
+            Do not convert to `list` or `dict` when encountering an attribute
+            whose type is `tuple`, `dict`, or `set`. Only meaningful if
+            *recurse* is `True`.
+
+    Returns:
+        Return type of *tuple_factory*
+
+    Raises:
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    ..  versionadded:: 16.2.0
+    """
+    attrs = fields(inst.__class__)
+    rv = []
+    retain = retain_collection_types  # Very long. :/
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+        if recurse is True:
+            if has(v.__class__):
+                rv.append(
+                    astuple(
+                        v,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain is True else list
+                items = [
+                    (
+                        astuple(
+                            j,
+                            recurse=True,
+                            filter=filter,
+                            tuple_factory=tuple_factory,
+                            retain_collection_types=retain,
+                        )
+                        if has(j.__class__)
+                        else j
+                    )
+                    for j in v
+                ]
+                try:
+                    rv.append(cf(items))
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears, for example, for a namedtuple)
+                    rv.append(cf(*items))
+            elif isinstance(v, dict):
+                df = v.__class__ if retain is True else dict
+                rv.append(
+                    df(
+                        (
+                            (
+                                astuple(
+                                    kk,
+                                    tuple_factory=tuple_factory,
+                                    retain_collection_types=retain,
+                                )
+                                if has(kk.__class__)
+                                else kk
+                            ),
+                            (
+                                astuple(
+                                    vv,
+                                    tuple_factory=tuple_factory,
+                                    retain_collection_types=retain,
+                                )
+                                if has(vv.__class__)
+                                else vv
+                            ),
+                        )
+                        for kk, vv in v.items()
+                    )
+                )
+            else:
+                rv.append(v)
+        else:
+            rv.append(v)
+
+    return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+    """
+    Check whether *cls* is a class with *attrs* attributes.
+
+    Args:
+        cls (type): Class to introspect.
+
+    Raises:
+        TypeError: If *cls* is not a class.
+
+    Returns:
+        bool:
+    """
+    attrs = getattr(cls, "__attrs_attrs__", None)
+    if attrs is not None:
+        return True
+
+    # No attrs, maybe it's a specialized generic (A[str])?
+    generic_base = get_generic_base(cls)
+    if generic_base is not None:
+        generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
+        if generic_attrs is not None:
+            # Stick it on here for speed next time.
+            cls.__attrs_attrs__ = generic_attrs
+        return generic_attrs is not None
+    return False
+
+
+def assoc(inst, **changes):
+    """
+    Copy *inst* and apply *changes*.
+
+    This is different from `evolve`, which applies the changes to the arguments
+    that are used to create the new instance.
+
+    `evolve`'s behavior is preferable, but there are `edge cases`_ where it
+    doesn't work. Therefore `assoc` is deprecated, but will not be removed.
+
+    .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251
+
+    Args:
+        inst: Instance of a class with *attrs* attributes.
+
+        changes: Keyword changes in the new copy.
+
+    Returns:
+        A copy of inst with *changes* incorporated.
+
+    Raises:
+        attrs.exceptions.AttrsAttributeNotFoundError:
+            If *attr_name* couldn't be found on *cls*.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    ..  deprecated:: 17.1.0
+        Use `attrs.evolve` instead if you can. This function will not be
+        removed due to the slightly different approach compared to
+        `attrs.evolve`, though.
+    """
+    new = copy.copy(inst)
+    attrs = fields(inst.__class__)
+    for k, v in changes.items():
+        a = getattr(attrs, k, NOTHING)
+        if a is NOTHING:
+            msg = f"{k} is not an attrs attribute on {new.__class__}."
+            raise AttrsAttributeNotFoundError(msg)
+        _OBJ_SETATTR(new, k, v)
+    return new
+
+
+def resolve_types(
+    cls, globalns=None, localns=None, attribs=None, include_extras=True
+):
+    """
+    Resolve any strings and forward annotations in type annotations.
+
+    This is only required if you need concrete types in :class:`Attribute`'s
+    *type* field. In other words, you don't need to resolve your types if you
+    only use them for static type checking.
+
+    With no arguments, names will be looked up in the module in which the class
+    was created. If this is not what you want, for example, if the name only
+    exists inside a method, you may pass *globalns* or *localns* to specify
+    other dictionaries in which to look up these names. See the docs of
+    `typing.get_type_hints` for more details.
+
+    Args:
+        cls (type): Class to resolve.
+
+        globalns (dict | None): Dictionary containing global variables.
+
+        localns (dict | None): Dictionary containing local variables.
+
+        attribs (list | None):
+            List of attribs for the given class. This is necessary when calling
+            from inside a ``field_transformer`` since *cls* is not an *attrs*
+            class yet.
+
+        include_extras (bool):
+            Resolve more accurately, if possible. Pass ``include_extras`` to
+            ``typing.get_type_hints``, if supported by the typing module. On
+            supported Python versions (3.9+), this resolves the types more
+            accurately.
+
+    Raises:
+        TypeError: If *cls* is not a class.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class and you didn't pass any attribs.
+
+        NameError: If types cannot be resolved because of missing variables.
+
+    Returns:
+        *cls* so you can use this function also as a class decorator. Please
+        note that you have to apply it **after** `attrs.define`. That means the
+        decorator has to come in the line **before** `attrs.define`.
+
+    ..  versionadded:: 20.1.0
+    ..  versionadded:: 21.1.0 *attribs*
+    ..  versionadded:: 23.1.0 *include_extras*
+    """
+    # Since calling get_type_hints is expensive we cache whether we've
+    # done it already.
+    if getattr(cls, "__attrs_types_resolved__", None) != cls:
+        import typing
+
+        kwargs = {"globalns": globalns, "localns": localns}
+
+        if PY_3_9_PLUS:
+            kwargs["include_extras"] = include_extras
+
+        hints = typing.get_type_hints(cls, **kwargs)
+        for field in fields(cls) if attribs is None else attribs:
+            if field.name in hints:
+                # Since fields have been frozen we must work around it.
+                _OBJ_SETATTR(field, "type", hints[field.name])
+        # We store the class we resolved so that subclasses know they haven't
+        # been resolved.
+        cls.__attrs_types_resolved__ = cls
+
+    # Return the class so you can use it as a decorator too.
+    return cls
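
A short sketch tying the helpers in this module together: `asdict` with a *filter* and a *value_serializer*, plus `has` and `assoc`; the class and field names are invented for illustration:

import datetime

import attr

@attr.s(auto_attribs=True)
class User:
    name: str
    password: str
    created: datetime.date

u = User("alice", "hunter2", datetime.date(2024, 1, 1))

assert attr.has(User)

# Drop the password via the filter hook and serialize dates to ISO strings.
d = attr.asdict(
    u,
    filter=lambda a, v: a.name != "password",
    value_serializer=lambda inst, a, v: (
        v.isoformat() if isinstance(v, datetime.date) else v
    ),
)
assert d == {"name": "alice", "created": "2024-01-01"}

# assoc copies the instance and sets the changed attributes directly on the copy.
assert attr.assoc(u, name="bob").name == "bob"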
diff --git a/.venv/lib/python3.12/site-packages/attr/_make.py b/.venv/lib/python3.12/site-packages/attr/_make.py
new file mode 100644
index 00000000..e84d9792
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_make.py
@@ -0,0 +1,3123 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+import abc
+import contextlib
+import copy
+import enum
+import inspect
+import itertools
+import linecache
+import sys
+import types
+import unicodedata
+
+from collections.abc import Callable, Mapping
+from functools import cached_property
+from typing import Any, NamedTuple, TypeVar
+
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+    PY_3_10_PLUS,
+    PY_3_11_PLUS,
+    PY_3_13_PLUS,
+    _AnnotationExtractor,
+    _get_annotations,
+    get_generic_base,
+)
+from .exceptions import (
+    DefaultAlreadySetError,
+    FrozenInstanceError,
+    NotAnAttrsClassError,
+    UnannotatedAttributeError,
+)
+
+
+# This is used at least twice, so cache it here.
+_OBJ_SETATTR = object.__setattr__
+_INIT_FACTORY_PAT = "__attr_factory_%s"
+_CLASSVAR_PREFIXES = (
+    "typing.ClassVar",
+    "t.ClassVar",
+    "ClassVar",
+    "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_HASH_CACHE_FIELD = "_attrs_cached_hash"
+
+_EMPTY_METADATA_SINGLETON = types.MappingProxyType({})
+
+# Unique object for unequivocal getattr() defaults.
+_SENTINEL = object()
+
+_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate)
+
+
+class _Nothing(enum.Enum):
+    """
+    Sentinel to indicate the lack of a value when `None` is ambiguous.
+
+    If extending attrs, you can use ``typing.Literal[NOTHING]`` to show
+    that a value may be ``NOTHING``.
+
+    .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
+    .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant.
+    """
+
+    NOTHING = enum.auto()
+
+    def __repr__(self):
+        return "NOTHING"
+
+    def __bool__(self):
+        return False
+
+
+NOTHING = _Nothing.NOTHING
+"""
+Sentinel to indicate the lack of a value when `None` is ambiguous.
+
+When using in 3rd party code, use `attrs.NothingType` for type annotations.
+"""
+
+
+class _CacheHashWrapper(int):
+    """
+    An integer subclass that pickles / copies as None
+
+    This is used for non-slots classes with ``cache_hash=True``, to avoid
+    serializing a potentially (even likely) invalid hash value. Since `None`
+    is the default value for uncalculated hashes, whenever this is copied,
+    the copy's value for the hash should automatically reset.
+
+    See GH #613 for more details.
+    """
+
+    def __reduce__(self, _none_constructor=type(None), _args=()):  # noqa: B008
+        return _none_constructor, _args
+
+
+def attrib(
+    default=NOTHING,
+    validator=None,
+    repr=True,
+    cmp=None,
+    hash=None,
+    init=True,
+    metadata=None,
+    type=None,
+    converter=None,
+    factory=None,
+    kw_only=False,
+    eq=None,
+    order=None,
+    on_setattr=None,
+    alias=None,
+):
+    """
+    Create a new field / attribute on a class.
+
+    Identical to `attrs.field`, except it's not keyword-only.
+
+    Consider using `attrs.field` in new code (``attr.ib`` will *never* go away,
+    though).
+
+    ..  warning::
+
+        Does **nothing** unless the class is also decorated with
+        `attr.s` (or similar)!
+
+
+    .. versionadded:: 15.2.0 *convert*
+    .. versionadded:: 16.3.0 *metadata*
+    .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+    .. versionchanged:: 17.1.0
+       *hash* is `None` and therefore mirrors *eq* by default.
+    .. versionadded:: 17.3.0 *type*
+    .. deprecated:: 17.4.0 *convert*
+    .. versionadded:: 17.4.0
+       *converter* as a replacement for the deprecated *convert* to achieve
+       consistency with other noun-based arguments.
+    .. versionadded:: 18.1.0
+       ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+    .. versionadded:: 18.2.0 *kw_only*
+    .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+    .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
+    .. versionadded:: 20.1.0 *on_setattr*
+    .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+    .. versionchanged:: 21.1.0
+       *eq*, *order*, and *cmp* also accept a custom callable
+    .. versionchanged:: 21.1.0 *cmp* undeprecated
+    .. versionadded:: 22.2.0 *alias*
+    """
+    eq, eq_key, order, order_key = _determine_attrib_eq_order(
+        cmp, eq, order, True
+    )
+
+    if hash is not None and hash is not True and hash is not False:
+        msg = "Invalid value for hash.  Must be True, False, or None."
+        raise TypeError(msg)
+
+    if factory is not None:
+        if default is not NOTHING:
+            msg = (
+                "The `default` and `factory` arguments are mutually exclusive."
+            )
+            raise ValueError(msg)
+        if not callable(factory):
+            msg = "The `factory` argument must be a callable."
+            raise ValueError(msg)
+        default = Factory(factory)
+
+    if metadata is None:
+        metadata = {}
+
+    # Apply syntactic sugar by auto-wrapping.
+    if isinstance(on_setattr, (list, tuple)):
+        on_setattr = setters.pipe(*on_setattr)
+
+    if validator and isinstance(validator, (list, tuple)):
+        validator = and_(*validator)
+
+    if converter and isinstance(converter, (list, tuple)):
+        converter = pipe(*converter)
+
+    return _CountingAttr(
+        default=default,
+        validator=validator,
+        repr=repr,
+        cmp=None,
+        hash=hash,
+        init=init,
+        converter=converter,
+        metadata=metadata,
+        type=type,
+        kw_only=kw_only,
+        eq=eq,
+        eq_key=eq_key,
+        order=order,
+        order_key=order_key,
+        on_setattr=on_setattr,
+        alias=alias,
+    )
+
+
+def _compile_and_eval(
+    script: str,
+    globs: dict[str, Any] | None,
+    locs: Mapping[str, object] | None = None,
+    filename: str = "",
+) -> None:
+    """
+    Evaluate the script with the given global (globs) and local (locs)
+    variables.
+    """
+    bytecode = compile(script, filename, "exec")
+    eval(bytecode, globs, locs)
+
+
+def _linecache_and_compile(
+    script: str,
+    filename: str,
+    globs: dict[str, Any] | None,
+    locals: Mapping[str, object] | None = None,
+) -> dict[str, Any]:
+    """
+    Cache the script with _linecache_, compile it and return the _locals_.
+    """
+
+    locs = {} if locals is None else locals
+
+    # In order for debuggers like PDB to be able to step through the code,
+    # we add a fake linecache entry.
+    count = 1
+    base_filename = filename
+    while True:
+        linecache_tuple = (
+            len(script),
+            None,
+            script.splitlines(True),
+            filename,
+        )
+        old_val = linecache.cache.setdefault(filename, linecache_tuple)
+        if old_val == linecache_tuple:
+            break
+
+        filename = f"{base_filename[:-1]}-{count}>"
+        count += 1
+
+    _compile_and_eval(script, globs, locs, filename)
+
+    return locs
+
+
+def _make_attr_tuple_class(cls_name: str, attr_names: list[str]) -> type:
+    """
+    Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+    The subclass is a bare tuple with properties for names.
+
+    class MyClassAttributes(tuple):
+        __slots__ = ()
+        x = property(itemgetter(0))
+    """
+    attr_class_name = f"{cls_name}Attributes"
+    body = {}
+    for i, attr_name in enumerate(attr_names):
+
+        def getter(self, i=i):
+            return self[i]
+
+        body[attr_name] = property(getter)
+    return type(attr_class_name, (tuple,), body)
+
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+class _Attributes(NamedTuple):
+    attrs: type
+    base_attrs: list[Attribute]
+    base_attrs_map: dict[str, type]
+
+
+def _is_class_var(annot):
+    """
+    Check whether *annot* is a typing.ClassVar.
+
+    The string comparison hack is used to avoid evaluating all string
+    annotations which would put attrs-based classes at a performance
+    disadvantage compared to plain old classes.
+    """
+    annot = str(annot)
+
+    # Annotation can be quoted.
+    if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+        annot = annot[1:-1]
+
+    return annot.startswith(_CLASSVAR_PREFIXES)
+
+
+def _has_own_attribute(cls, attrib_name):
+    """
+    Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+    """
+    return attrib_name in cls.__dict__
+
+
+def _collect_base_attrs(
+    cls, taken_attr_names
+) -> tuple[list[Attribute], dict[str, type]]:
+    """
+    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+    """
+    base_attrs = []
+    base_attr_map = {}  # A dictionary of base attrs to their classes.
+
+    # Traverse the MRO and collect attributes.
+    for base_cls in reversed(cls.__mro__[1:-1]):
+        for a in getattr(base_cls, "__attrs_attrs__", []):
+            if a.inherited or a.name in taken_attr_names:
+                continue
+
+            a = a.evolve(inherited=True)  # noqa: PLW2901
+            base_attrs.append(a)
+            base_attr_map[a.name] = base_cls
+
+    # For each name, only keep the freshest definition i.e. the furthest at the
+    # back.  base_attr_map is fine because it gets overwritten with every new
+    # instance.
+    filtered = []
+    seen = set()
+    for a in reversed(base_attrs):
+        if a.name in seen:
+            continue
+        filtered.insert(0, a)
+        seen.add(a.name)
+
+    return filtered, base_attr_map
+
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+    """
+    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+    N.B. *taken_attr_names* will be mutated.
+
+    Adhere to the old incorrect behavior.
+
+    Notably it collects from the front and considers inherited attributes which
+    leads to the buggy behavior reported in #428.
+    """
+    base_attrs = []
+    base_attr_map = {}  # A dictionary of base attrs to their classes.
+
+    # Traverse the MRO and collect attributes.
+    for base_cls in cls.__mro__[1:-1]:
+        for a in getattr(base_cls, "__attrs_attrs__", []):
+            if a.name in taken_attr_names:
+                continue
+
+            a = a.evolve(inherited=True)  # noqa: PLW2901
+            taken_attr_names.add(a.name)
+            base_attrs.append(a)
+            base_attr_map[a.name] = base_cls
+
+    return base_attrs, base_attr_map
+
+
+def _transform_attrs(
+    cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+) -> _Attributes:
+    """
+    Transform all `_CountingAttr`s on a class into `Attribute`s.
+
+    If *these* is passed, use that and don't look for them on the class.
+
+    If *collect_by_mro* is True, collect them in the correct MRO order,
+    otherwise use the old -- incorrect -- order.  See #428.
+
+    Return an `_Attributes`.
+    """
+    cd = cls.__dict__
+    anns = _get_annotations(cls)
+
+    if these is not None:
+        ca_list = list(these.items())
+    elif auto_attribs is True:
+        ca_names = {
+            name
+            for name, attr in cd.items()
+            if attr.__class__ is _CountingAttr
+        }
+        ca_list = []
+        annot_names = set()
+        for attr_name, type in anns.items():
+            if _is_class_var(type):
+                continue
+            annot_names.add(attr_name)
+            a = cd.get(attr_name, NOTHING)
+
+            if a.__class__ is not _CountingAttr:
+                a = attrib(a)
+            ca_list.append((attr_name, a))
+
+        unannotated = ca_names - annot_names
+        if unannotated:
+            raise UnannotatedAttributeError(
+                "The following `attr.ib`s lack a type annotation: "
+                + ", ".join(
+                    sorted(unannotated, key=lambda n: cd.get(n).counter)
+                )
+                + "."
+            )
+    else:
+        ca_list = sorted(
+            (
+                (name, attr)
+                for name, attr in cd.items()
+                if attr.__class__ is _CountingAttr
+            ),
+            key=lambda e: e[1].counter,
+        )
+
+    fca = Attribute.from_counting_attr
+    own_attrs = [
+        fca(attr_name, ca, anns.get(attr_name)) for attr_name, ca in ca_list
+    ]
+
+    if collect_by_mro:
+        base_attrs, base_attr_map = _collect_base_attrs(
+            cls, {a.name for a in own_attrs}
+        )
+    else:
+        base_attrs, base_attr_map = _collect_base_attrs_broken(
+            cls, {a.name for a in own_attrs}
+        )
+
+    if kw_only:
+        own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+        base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+    attrs = base_attrs + own_attrs
+
+    if field_transformer is not None:
+        attrs = tuple(field_transformer(cls, attrs))
+
+    # Check attr order after executing the field_transformer.
+    # Mandatory vs non-mandatory attr order only matters when they are part of
+    # the __init__ signature and when they aren't kw_only (which are moved to
+    # the end and can be mandatory or non-mandatory in any order, as they will
+    # be specified as keyword args anyway). Check the order of those attrs:
+    had_default = False
+    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+        if had_default is True and a.default is NOTHING:
+            msg = f"No mandatory attributes allowed after an attribute with a default value or factory.  Attribute in question: {a!r}"
+            raise ValueError(msg)
+
+        if had_default is False and a.default is not NOTHING:
+            had_default = True
+
+    # Resolve default field alias after executing field_transformer.
+    # This allows field_transformer to differentiate between explicit vs
+    # default aliases and supply their own defaults.
+    for a in attrs:
+        if not a.alias:
+            # Evolve is very slow, so we hold our nose and do it dirty.
+            _OBJ_SETATTR.__get__(a)("alias", _default_init_alias_for(a.name))
+
+    # Create AttrsClass *after* applying the field_transformer since it may
+    # add or remove attributes!
+    attr_names = [a.name for a in attrs]
+    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+    return _Attributes(AttrsClass(attrs), base_attrs, base_attr_map)
+
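+# A minimal sketch of the ordering rule enforced above (illustrative only,
+# not part of the attrs sources): once a positional attribute has a default,
+# every later positional attribute needs one too.
+#
+#     import attr
+#
+#     @attr.s
+#     class C:
+#         x = attr.ib(default=1)
+#         y = attr.ib()  # ValueError at class creation time
+#
+# kw_only attributes are exempt because they are moved to the end of the
+# generated __init__ signature anyway.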
+
+def _make_cached_property_getattr(cached_properties, original_getattr, cls):
+    lines = [
+        # Wrapped to get `__class__` into closure cell for super()
+        # (It will be replaced with the newly constructed class after construction).
+        "def wrapper(_cls):",
+        "    __class__ = _cls",
+        "    def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):",
+        "         func = cached_properties.get(item)",
+        "         if func is not None:",
+        "              result = func(self)",
+        "              _setter = _cached_setattr_get(self)",
+        "              _setter(item, result)",
+        "              return result",
+    ]
+    if original_getattr is not None:
+        lines.append(
+            "         return original_getattr(self, item)",
+        )
+    else:
+        lines.extend(
+            [
+                "         try:",
+                "             return super().__getattribute__(item)",
+                "         except AttributeError:",
+                "             if not hasattr(super(), '__getattr__'):",
+                "                 raise",
+                "             return super().__getattr__(item)",
+                "         original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"",
+                "         raise AttributeError(original_error)",
+            ]
+        )
+
+    lines.extend(
+        [
+            "    return __getattr__",
+            "__getattr__ = wrapper(_cls)",
+        ]
+    )
+
+    unique_filename = _generate_unique_filename(cls, "getattr")
+
+    glob = {
+        "cached_properties": cached_properties,
+        "_cached_setattr_get": _OBJ_SETATTR.__get__,
+        "original_getattr": original_getattr,
+    }
+
+    return _linecache_and_compile(
+        "\n".join(lines), unique_filename, glob, locals={"_cls": cls}
+    )["__getattr__"]
+
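+# Rough illustration of how the generated __getattr__ above gets used
+# (an illustrative sketch, not part of the attrs sources): slotted classes
+# have no instance __dict__ for functools.cached_property to write to, so the
+# first access falls through to __getattr__, which computes the value once
+# and stores it in the slot of the same name.
+#
+#     import functools
+#     import attr
+#
+#     @attr.s(slots=True)
+#     class C:
+#         x = attr.ib()
+#
+#         @functools.cached_property
+#         def doubled(self):
+#             return 2 * self.x
+#
+#     c = C(3)
+#     c.doubled  # computed via __getattr__ on first access, then read from the slot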
+
+def _frozen_setattrs(self, name, value):
+    """
+    Attached to frozen classes as __setattr__.
+    """
+    if isinstance(self, BaseException) and name in (
+        "__cause__",
+        "__context__",
+        "__traceback__",
+        "__suppress_context__",
+        "__notes__",
+    ):
+        BaseException.__setattr__(self, name, value)
+        return
+
+    raise FrozenInstanceError
+
+
+def _frozen_delattrs(self, name):
+    """
+    Attached to frozen classes as __delattr__.
+    """
+    if isinstance(self, BaseException) and name in ("__notes__",):
+        BaseException.__delattr__(self, name)
+        return
+
+    raise FrozenInstanceError
+
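+# The two hooks above are what make frozen instances read-only; for example
+# (illustrative, not part of the attrs sources):
+#
+#     import attr
+#
+#     @attr.s(frozen=True)
+#     class Point:
+#         x = attr.ib()
+#
+#     p = Point(1)
+#     p.x = 2    # raises attr.exceptions.FrozenInstanceError
+#     del p.x    # raises attr.exceptions.FrozenInstanceError
+#
+# Exceptions are special-cased so the interpreter can still set __cause__,
+# __context__, __traceback__, etc. on frozen exception classes.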
+
+def evolve(*args, **changes):
+    """
+    Create a new instance, based on the first positional argument with
+    *changes* applied.
+
+    .. tip::
+
+       On Python 3.13 and later, you can also use `copy.replace` instead.
+
+    Args:
+
+        inst:
+            Instance of a class with *attrs* attributes. *inst* must be passed
+            as a positional argument.
+
+        changes:
+            Keyword changes in the new copy.
+
+    Returns:
+        A copy of *inst* with *changes* incorporated.
+
+    Raises:
+        TypeError:
+            If a keyword argument in *changes* couldn't be found in the
+            class ``__init__``.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If the class of *inst* is not an *attrs* class.
+
+    .. versionadded:: 17.1.0
+    .. deprecated:: 23.1.0
+       It is now deprecated to pass the instance using the keyword argument
+       *inst*. It will raise a warning until at least April 2024, after which
+       it will become an error. Always pass the instance as a positional
+       argument.
+    .. versionchanged:: 24.1.0
+       *inst* can't be passed as a keyword argument anymore.
+    """
+    try:
+        (inst,) = args
+    except ValueError:
+        msg = (
+            f"evolve() takes 1 positional argument, but {len(args)} were given"
+        )
+        raise TypeError(msg) from None
+
+    cls = inst.__class__
+    attrs = fields(cls)
+    for a in attrs:
+        if not a.init:
+            continue
+        attr_name = a.name  # To deal with private attributes.
+        init_name = a.alias
+        if init_name not in changes:
+            changes[init_name] = getattr(inst, attr_name)
+
+    return cls(**changes)
+
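+# Typical use of evolve() (illustrative, not part of the attrs sources):
+# keyword arguments are matched against the __init__ aliases, so a private
+# attribute ``_x`` is passed as ``x``.
+#
+#     import attr
+#
+#     @attr.s
+#     class C:
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     c1 = C(1, 2)
+#     c2 = attr.evolve(c1, y=3)  # C(x=1, y=3); c1 is left untouched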
+
+class _ClassBuilder:
+    """
+    Iteratively build *one* class.
+    """
+
+    __slots__ = (
+        "_add_method_dunders",
+        "_attr_names",
+        "_attrs",
+        "_base_attr_map",
+        "_base_names",
+        "_cache_hash",
+        "_cls",
+        "_cls_dict",
+        "_delete_attribs",
+        "_frozen",
+        "_has_custom_setattr",
+        "_has_post_init",
+        "_has_pre_init",
+        "_is_exc",
+        "_on_setattr",
+        "_pre_init_has_args",
+        "_repr_added",
+        "_script_snippets",
+        "_slots",
+        "_weakref_slot",
+        "_wrote_own_setattr",
+    )
+
+    def __init__(
+        self,
+        cls: type,
+        these,
+        slots,
+        frozen,
+        weakref_slot,
+        getstate_setstate,
+        auto_attribs,
+        kw_only,
+        cache_hash,
+        is_exc,
+        collect_by_mro,
+        on_setattr,
+        has_custom_setattr,
+        field_transformer,
+    ):
+        attrs, base_attrs, base_map = _transform_attrs(
+            cls,
+            these,
+            auto_attribs,
+            kw_only,
+            collect_by_mro,
+            field_transformer,
+        )
+
+        self._cls = cls
+        self._cls_dict = dict(cls.__dict__) if slots else {}
+        self._attrs = attrs
+        self._base_names = {a.name for a in base_attrs}
+        self._base_attr_map = base_map
+        self._attr_names = tuple(a.name for a in attrs)
+        self._slots = slots
+        self._frozen = frozen
+        self._weakref_slot = weakref_slot
+        self._cache_hash = cache_hash
+        self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
+        self._pre_init_has_args = False
+        if self._has_pre_init:
+            # Check if the pre init method has more arguments than just `self`
+            # We want to pass arguments if pre init expects arguments
+            pre_init_func = cls.__attrs_pre_init__
+            pre_init_signature = inspect.signature(pre_init_func)
+            self._pre_init_has_args = len(pre_init_signature.parameters) > 1
+        self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+        self._delete_attribs = not bool(these)
+        self._is_exc = is_exc
+        self._on_setattr = on_setattr
+
+        self._has_custom_setattr = has_custom_setattr
+        self._wrote_own_setattr = False
+
+        self._cls_dict["__attrs_attrs__"] = self._attrs
+
+        if frozen:
+            self._cls_dict["__setattr__"] = _frozen_setattrs
+            self._cls_dict["__delattr__"] = _frozen_delattrs
+
+            self._wrote_own_setattr = True
+        elif on_setattr in (
+            _DEFAULT_ON_SETATTR,
+            setters.validate,
+            setters.convert,
+        ):
+            has_validator = has_converter = False
+            for a in attrs:
+                if a.validator is not None:
+                    has_validator = True
+                if a.converter is not None:
+                    has_converter = True
+
+                if has_validator and has_converter:
+                    break
+            if (
+                (
+                    on_setattr == _DEFAULT_ON_SETATTR
+                    and not (has_validator or has_converter)
+                )
+                or (on_setattr == setters.validate and not has_validator)
+                or (on_setattr == setters.convert and not has_converter)
+            ):
+                # If class-level on_setattr is set to convert + validate, but
+                # there's no field to convert or validate, pretend like there's
+                # no on_setattr.
+                self._on_setattr = None
+
+        if getstate_setstate:
+            (
+                self._cls_dict["__getstate__"],
+                self._cls_dict["__setstate__"],
+            ) = self._make_getstate_setstate()
+
+        # tuples of script, globs, hook
+        self._script_snippets: list[
+            tuple[str, dict, Callable[[dict, dict], Any]]
+        ] = []
+        self._repr_added = False
+
+        # We want to only do this check once; in 99.9% of cases these
+        # exist.
+        if not hasattr(self._cls, "__module__") or not hasattr(
+            self._cls, "__qualname__"
+        ):
+            self._add_method_dunders = self._add_method_dunders_safe
+        else:
+            self._add_method_dunders = self._add_method_dunders_unsafe
+
+    def __repr__(self):
+        return f"<_ClassBuilder(cls={self._cls.__name__})>"
+
+    def _eval_snippets(self) -> None:
+        """
+        Evaluate any registered snippets in one go.
+        """
+        script = "\n".join([snippet[0] for snippet in self._script_snippets])
+        globs = {}
+        for _, snippet_globs, _ in self._script_snippets:
+            globs.update(snippet_globs)
+
+        locs = _linecache_and_compile(
+            script,
+            _generate_unique_filename(self._cls, "methods"),
+            globs,
+        )
+
+        for _, _, hook in self._script_snippets:
+            hook(self._cls_dict, locs)
+
+    def build_class(self):
+        """
+        Finalize class based on the accumulated configuration.
+
+        Builder cannot be used after calling this method.
+        """
+        self._eval_snippets()
+        if self._slots is True:
+            cls = self._create_slots_class()
+        else:
+            cls = self._patch_original_class()
+            if PY_3_10_PLUS:
+                cls = abc.update_abstractmethods(cls)
+
+        # The method only gets called if it's not inherited from a base class.
+        # _has_own_attribute does NOT work properly for classmethods.
+        if (
+            getattr(cls, "__attrs_init_subclass__", None)
+            and "__attrs_init_subclass__" not in cls.__dict__
+        ):
+            cls.__attrs_init_subclass__()
+
+        return cls
+
+    def _patch_original_class(self):
+        """
+        Apply accumulated methods and return the class.
+        """
+        cls = self._cls
+        base_names = self._base_names
+
+        # Clean class of attribute definitions (`attr.ib()`s).
+        if self._delete_attribs:
+            for name in self._attr_names:
+                if (
+                    name not in base_names
+                    and getattr(cls, name, _SENTINEL) is not _SENTINEL
+                ):
+                    # An AttributeError can happen if a base class defines a
+                    # class variable and we want to set an attribute with the
+                    # same name by using only a type annotation.
+                    with contextlib.suppress(AttributeError):
+                        delattr(cls, name)
+
+        # Attach our dunder methods.
+        for name, value in self._cls_dict.items():
+            setattr(cls, name, value)
+
+        # If we've inherited an attrs __setattr__ and don't write our own,
+        # reset it to object's.
+        if not self._wrote_own_setattr and getattr(
+            cls, "__attrs_own_setattr__", False
+        ):
+            cls.__attrs_own_setattr__ = False
+
+            if not self._has_custom_setattr:
+                cls.__setattr__ = _OBJ_SETATTR
+
+        return cls
+
+    def _create_slots_class(self):
+        """
+        Build and return a new class with a `__slots__` attribute.
+        """
+        cd = {
+            k: v
+            for k, v in self._cls_dict.items()
+            if k not in (*tuple(self._attr_names), "__dict__", "__weakref__")
+        }
+
+        # If our class doesn't have its own implementation of __setattr__
+        # (either from the user or by us), check the bases, if one of them has
+        # an attrs-made __setattr__, that needs to be reset. We don't walk the
+        # MRO because we only care about our immediate base classes.
+        # XXX: This can be confused by subclassing a slotted attrs class with
+        # XXX: a non-attrs class and then subclassing the resulting class with
+        # XXX: an attrs class.  See `test_slotted_confused` for details.  For
+        # XXX: now that's OK with us.
+        if not self._wrote_own_setattr:
+            cd["__attrs_own_setattr__"] = False
+
+            if not self._has_custom_setattr:
+                for base_cls in self._cls.__bases__:
+                    if base_cls.__dict__.get("__attrs_own_setattr__", False):
+                        cd["__setattr__"] = _OBJ_SETATTR
+                        break
+
+        # Traverse the MRO to collect existing slots
+        # and check for an existing __weakref__.
+        existing_slots = {}
+        weakref_inherited = False
+        for base_cls in self._cls.__mro__[1:-1]:
+            if base_cls.__dict__.get("__weakref__", None) is not None:
+                weakref_inherited = True
+            existing_slots.update(
+                {
+                    name: getattr(base_cls, name)
+                    for name in getattr(base_cls, "__slots__", [])
+                }
+            )
+
+        base_names = set(self._base_names)
+
+        names = self._attr_names
+        if (
+            self._weakref_slot
+            and "__weakref__" not in getattr(self._cls, "__slots__", ())
+            and "__weakref__" not in names
+            and not weakref_inherited
+        ):
+            names += ("__weakref__",)
+
+        cached_properties = {
+            name: cached_prop.func
+            for name, cached_prop in cd.items()
+            if isinstance(cached_prop, cached_property)
+        }
+
+        # Collect methods with a `__class__` reference that are shadowed in
+        # the new class, so we know to update them.
+        additional_closure_functions_to_update = []
+        if cached_properties:
+            class_annotations = _get_annotations(self._cls)
+            for name, func in cached_properties.items():
+                # Add cached properties to names for slotting.
+                names += (name,)
+                # Clear out function from class to avoid clashing.
+                del cd[name]
+                additional_closure_functions_to_update.append(func)
+                annotation = inspect.signature(func).return_annotation
+                if annotation is not inspect.Parameter.empty:
+                    class_annotations[name] = annotation
+
+            original_getattr = cd.get("__getattr__")
+            if original_getattr is not None:
+                additional_closure_functions_to_update.append(original_getattr)
+
+            cd["__getattr__"] = _make_cached_property_getattr(
+                cached_properties, original_getattr, self._cls
+            )
+
+        # We only add the names of attributes that aren't inherited.
+        # Setting __slots__ to inherited attributes wastes memory.
+        slot_names = [name for name in names if name not in base_names]
+
+        # There are slots for attributes from the current class
+        # that are also defined in parent classes.
+        # As their descriptors may be overridden by a child class,
+        # we collect them here and update the class dict.
+        reused_slots = {
+            slot: slot_descriptor
+            for slot, slot_descriptor in existing_slots.items()
+            if slot in slot_names
+        }
+        slot_names = [name for name in slot_names if name not in reused_slots]
+        cd.update(reused_slots)
+        if self._cache_hash:
+            slot_names.append(_HASH_CACHE_FIELD)
+
+        cd["__slots__"] = tuple(slot_names)
+
+        cd["__qualname__"] = self._cls.__qualname__
+
+        # Create new class based on old class and our methods.
+        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+        # The following is a fix for
+        # <https://github.com/python-attrs/attrs/issues/102>.
+        # If a method mentions `__class__` or uses the no-arg super(), the
+        # compiler will bake a reference to the class in the method itself
+        # as `method.__closure__`.  Since we replace the class with a
+        # clone, we rewrite these references so it keeps working.
+        for item in itertools.chain(
+            cls.__dict__.values(), additional_closure_functions_to_update
+        ):
+            if isinstance(item, (classmethod, staticmethod)):
+                # Class- and staticmethods hide their functions inside.
+                # These might need to be rewritten as well.
+                closure_cells = getattr(item.__func__, "__closure__", None)
+            elif isinstance(item, property):
+                # Workaround for property `super()` shortcut (PY3-only).
+                # There is no universal way for other descriptors.
+                closure_cells = getattr(item.fget, "__closure__", None)
+            else:
+                closure_cells = getattr(item, "__closure__", None)
+
+            if not closure_cells:  # Catch None or the empty list.
+                continue
+            for cell in closure_cells:
+                try:
+                    match = cell.cell_contents is self._cls
+                except ValueError:  # noqa: PERF203
+                    # ValueError: Cell is empty
+                    pass
+                else:
+                    if match:
+                        cell.cell_contents = cls
+        return cls
+
+    def add_repr(self, ns):
+        script, globs = _make_repr_script(self._attrs, ns)
+
+        def _attach_repr(cls_dict, globs):
+            cls_dict["__repr__"] = self._add_method_dunders(globs["__repr__"])
+
+        self._script_snippets.append((script, globs, _attach_repr))
+        self._repr_added = True
+        return self
+
+    def add_str(self):
+        if not self._repr_added:
+            msg = "__str__ can only be generated if a __repr__ exists."
+            raise ValueError(msg)
+
+        def __str__(self):
+            return self.__repr__()
+
+        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+        return self
+
+    def _make_getstate_setstate(self):
+        """
+        Create custom __setstate__ and __getstate__ methods.
+        """
+        # __weakref__ is not writable.
+        state_attr_names = tuple(
+            an for an in self._attr_names if an != "__weakref__"
+        )
+
+        def slots_getstate(self):
+            """
+            Automatically created by attrs.
+            """
+            return {name: getattr(self, name) for name in state_attr_names}
+
+        hash_caching_enabled = self._cache_hash
+
+        def slots_setstate(self, state):
+            """
+            Automatically created by attrs.
+            """
+            __bound_setattr = _OBJ_SETATTR.__get__(self)
+            if isinstance(state, tuple):
+                # Backward compatibility with attrs instances pickled with
+                # attrs versions before v22.2.0 which stored tuples.
+                for name, value in zip(state_attr_names, state):
+                    __bound_setattr(name, value)
+            else:
+                for name in state_attr_names:
+                    if name in state:
+                        __bound_setattr(name, state[name])
+
+            # The hash code cache is not included when the object is
+            # serialized, but it still needs to be initialized to None to
+            # indicate that the first call to __hash__ should be a cache
+            # miss.
+            if hash_caching_enabled:
+                __bound_setattr(_HASH_CACHE_FIELD, None)
+
+        return slots_getstate, slots_setstate
+
+    def make_unhashable(self):
+        self._cls_dict["__hash__"] = None
+        return self
+
+    def add_hash(self):
+        script, globs = _make_hash_script(
+            self._cls,
+            self._attrs,
+            frozen=self._frozen,
+            cache_hash=self._cache_hash,
+        )
+
+        def attach_hash(cls_dict: dict, locs: dict) -> None:
+            cls_dict["__hash__"] = self._add_method_dunders(locs["__hash__"])
+
+        self._script_snippets.append((script, globs, attach_hash))
+
+        return self
+
+    def add_init(self):
+        script, globs, annotations = _make_init_script(
+            self._cls,
+            self._attrs,
+            self._has_pre_init,
+            self._pre_init_has_args,
+            self._has_post_init,
+            self._frozen,
+            self._slots,
+            self._cache_hash,
+            self._base_attr_map,
+            self._is_exc,
+            self._on_setattr,
+            attrs_init=False,
+        )
+
+        def _attach_init(cls_dict, globs):
+            init = globs["__init__"]
+            init.__annotations__ = annotations
+            cls_dict["__init__"] = self._add_method_dunders(init)
+
+        self._script_snippets.append((script, globs, _attach_init))
+
+        return self
+
+    def add_replace(self):
+        self._cls_dict["__replace__"] = self._add_method_dunders(
+            lambda self, **changes: evolve(self, **changes)
+        )
+        return self
+
+    def add_match_args(self):
+        self._cls_dict["__match_args__"] = tuple(
+            field.name
+            for field in self._attrs
+            if field.init and not field.kw_only
+        )
+
+    def add_attrs_init(self):
+        script, globs, annotations = _make_init_script(
+            self._cls,
+            self._attrs,
+            self._has_pre_init,
+            self._pre_init_has_args,
+            self._has_post_init,
+            self._frozen,
+            self._slots,
+            self._cache_hash,
+            self._base_attr_map,
+            self._is_exc,
+            self._on_setattr,
+            attrs_init=True,
+        )
+
+        def _attach_attrs_init(cls_dict, globs):
+            init = globs["__attrs_init__"]
+            init.__annotations__ = annotations
+            cls_dict["__attrs_init__"] = self._add_method_dunders(init)
+
+        self._script_snippets.append((script, globs, _attach_attrs_init))
+
+        return self
+
+    def add_eq(self):
+        cd = self._cls_dict
+
+        script, globs = _make_eq_script(self._attrs)
+
+        def _attach_eq(cls_dict, globs):
+            cls_dict["__eq__"] = self._add_method_dunders(globs["__eq__"])
+
+        self._script_snippets.append((script, globs, _attach_eq))
+
+        cd["__ne__"] = __ne__
+
+        return self
+
+    def add_order(self):
+        cd = self._cls_dict
+
+        cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+            self._add_method_dunders(meth)
+            for meth in _make_order(self._cls, self._attrs)
+        )
+
+        return self
+
+    def add_setattr(self):
+        sa_attrs = {}
+        for a in self._attrs:
+            on_setattr = a.on_setattr or self._on_setattr
+            if on_setattr and on_setattr is not setters.NO_OP:
+                sa_attrs[a.name] = a, on_setattr
+
+        if not sa_attrs:
+            return self
+
+        if self._has_custom_setattr:
+            # We need to write a __setattr__ but there already is one!
+            msg = "Can't combine custom __setattr__ with on_setattr hooks."
+            raise ValueError(msg)
+
+        # docstring comes from _add_method_dunders
+        def __setattr__(self, name, val):
+            try:
+                a, hook = sa_attrs[name]
+            except KeyError:
+                nval = val
+            else:
+                nval = hook(self, a, val)
+
+            _OBJ_SETATTR(self, name, nval)
+
+        self._cls_dict["__attrs_own_setattr__"] = True
+        self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+        self._wrote_own_setattr = True
+
+        return self
+
+    def _add_method_dunders_unsafe(self, method: Callable) -> Callable:
+        """
+        Add __module__ and __qualname__ to a *method*.
+        """
+        method.__module__ = self._cls.__module__
+
+        method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"
+
+        method.__doc__ = (
+            f"Method generated by attrs for class {self._cls.__qualname__}."
+        )
+
+        return method
+
+    def _add_method_dunders_safe(self, method: Callable) -> Callable:
+        """
+        Add __module__ and __qualname__ to a *method* if possible.
+        """
+        with contextlib.suppress(AttributeError):
+            method.__module__ = self._cls.__module__
+
+        with contextlib.suppress(AttributeError):
+            method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"
+
+        with contextlib.suppress(AttributeError):
+            method.__doc__ = f"Method generated by attrs for class {self._cls.__qualname__}."
+
+        return method
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+    """
+    Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+    values of eq and order.  If *eq* is None, set it to *default_eq*.
+    """
+    if cmp is not None and any((eq is not None, order is not None)):
+        msg = "Don't mix `cmp` with `eq' and `order`."
+        raise ValueError(msg)
+
+    # cmp takes precedence due to bw-compatibility.
+    if cmp is not None:
+        return cmp, cmp
+
+    # If left None, equality is set to the specified default and ordering
+    # mirrors equality.
+    if eq is None:
+        eq = default_eq
+
+    if order is None:
+        order = eq
+
+    if eq is False and order is True:
+        msg = "`order` can only be True if `eq` is True too."
+        raise ValueError(msg)
+
+    return eq, order
+
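+# How the class-level flags resolve (illustrative, not part of the attrs
+# sources):
+#
+#     _determine_attrs_eq_order(cmp=None, eq=None, order=None, default_eq=True)
+#     # -> (True, True): eq falls back to the default, order mirrors eq
+#
+#     _determine_attrs_eq_order(cmp=False, eq=None, order=None, default_eq=True)
+#     # -> (False, False): cmp wins for backward compatibility
+#
+#     _determine_attrs_eq_order(cmp=None, eq=False, order=True, default_eq=True)
+#     # -> ValueError: order=True requires eq=True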
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+    """
+    Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+    values of eq and order.  If *eq* is None, set it to *default_eq*.
+    """
+    if cmp is not None and any((eq is not None, order is not None)):
+        msg = "Don't mix `cmp` with `eq' and `order`."
+        raise ValueError(msg)
+
+    def decide_callable_or_boolean(value):
+        """
+        Decide whether a key function is used.
+        """
+        if callable(value):
+            value, key = True, value
+        else:
+            key = None
+        return value, key
+
+    # cmp takes precedence due to bw-compatibility.
+    if cmp is not None:
+        cmp, cmp_key = decide_callable_or_boolean(cmp)
+        return cmp, cmp_key, cmp, cmp_key
+
+    # If left None, equality is set to the specified default and ordering
+    # mirrors equality.
+    if eq is None:
+        eq, eq_key = default_eq, None
+    else:
+        eq, eq_key = decide_callable_or_boolean(eq)
+
+    if order is None:
+        order, order_key = eq, eq_key
+    else:
+        order, order_key = decide_callable_or_boolean(order)
+
+    if eq is False and order is True:
+        msg = "`order` can only be True if `eq` is True too."
+        raise ValueError(msg)
+
+    return eq, eq_key, order, order_key
+
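+# Passing a callable instead of a boolean turns it into a comparison key
+# (illustrative, not part of the attrs sources):
+#
+#     _determine_attrib_eq_order(None, str.lower, None, True)
+#     # -> (True, str.lower, True, str.lower)
+#
+# This is what lets e.g. attr.ib(eq=str.lower) compare a field
+# case-insensitively while order mirrors the same key.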
+
+def _determine_whether_to_implement(
+    cls, flag, auto_detect, dunders, default=True
+):
+    """
+    Check whether we should implement a set of methods for *cls*.
+
+    *flag* is the argument passed into @attr.s like 'init', *auto_detect* is
+    the same as passed into @attr.s, and *dunders* is a tuple of attribute
+    names whose presence signals that the user has implemented the method
+    themselves.
+
+    Return *default* if no reason either for or against is found.
+    """
+    if flag is True or flag is False:
+        return flag
+
+    if flag is None and auto_detect is False:
+        return default
+
+    # Logically, flag is None and auto_detect is True here.
+    for dunder in dunders:
+        if _has_own_attribute(cls, dunder):
+            return False
+
+    return default
+
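+# Effect of auto_detect on method generation (illustrative, not part of the
+# attrs sources): with auto_detect=True, a dunder defined in the class body
+# suppresses the corresponding generated method.
+#
+#     import attr
+#
+#     @attr.s(auto_detect=True)
+#     class C:
+#         x = attr.ib()
+#
+#         def __repr__(self):
+#             return "custom"
+#
+#     repr(C(1))  # "custom" -- the hand-written __repr__ is kept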
+
+def attrs(
+    maybe_cls=None,
+    these=None,
+    repr_ns=None,
+    repr=None,
+    cmp=None,
+    hash=None,
+    init=None,
+    slots=False,
+    frozen=False,
+    weakref_slot=True,
+    str=False,
+    auto_attribs=False,
+    kw_only=False,
+    cache_hash=False,
+    auto_exc=False,
+    eq=None,
+    order=None,
+    auto_detect=False,
+    collect_by_mro=False,
+    getstate_setstate=None,
+    on_setattr=None,
+    field_transformer=None,
+    match_args=True,
+    unsafe_hash=None,
+):
+    r"""
+    A class decorator that adds :term:`dunder methods` according to the
+    specified attributes using `attr.ib` or the *these* argument.
+
+    Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will
+    *never* go away, though).
+
+    Args:
+        repr_ns (str):
+            When using nested classes, there was no way in Python 2 to
+            automatically detect that.  This argument allows setting a custom
+            name for a more meaningful ``repr`` output.  This argument is
+            pointless in Python 3 and is therefore deprecated.
+
+    .. caution::
+        Refer to `attrs.define` for the rest of the parameters, but note that they
+        can have different defaults.
+
+        Notably, leaving *on_setattr* as `None` will **not** add any hooks.
+
+    .. versionadded:: 16.0.0 *slots*
+    .. versionadded:: 16.1.0 *frozen*
+    .. versionadded:: 16.3.0 *str*
+    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+    .. versionchanged:: 17.1.0
+       *hash* supports `None` as value which is also the default now.
+    .. versionadded:: 17.3.0 *auto_attribs*
+    .. versionchanged:: 18.1.0
+       If *these* is passed, no attributes are deleted from the class body.
+    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+    .. versionadded:: 18.2.0 *weakref_slot*
+    .. deprecated:: 18.2.0
+       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+       `DeprecationWarning` if the classes compared are subclasses of
+       each other. ``__eq__`` and ``__ne__`` never tried to compare
+       subclasses to each other.
+    .. versionchanged:: 19.2.0
+       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+       subclasses comparable anymore.
+    .. versionadded:: 18.2.0 *kw_only*
+    .. versionadded:: 18.2.0 *cache_hash*
+    .. versionadded:: 19.1.0 *auto_exc*
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
+    .. versionadded:: 20.1.0 *auto_detect*
+    .. versionadded:: 20.1.0 *collect_by_mro*
+    .. versionadded:: 20.1.0 *getstate_setstate*
+    .. versionadded:: 20.1.0 *on_setattr*
+    .. versionadded:: 20.3.0 *field_transformer*
+    .. versionchanged:: 21.1.0
+       ``init=False`` injects ``__attrs_init__``
+    .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
+    .. versionchanged:: 21.1.0 *cmp* undeprecated
+    .. versionadded:: 21.3.0 *match_args*
+    .. versionadded:: 22.2.0
+       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
+    .. deprecated:: 24.1.0 *repr_ns*
+    .. versionchanged:: 24.1.0
+       Instances are not compared as tuples of attributes anymore, but using a
+       big ``and`` condition. This is faster and has more correct behavior for
+       uncomparable values like `math.nan`.
+    .. versionadded:: 24.1.0
+       If a class has an *inherited* classmethod called
+       ``__attrs_init_subclass__``, it is executed after the class is created.
+    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
+    """
+    if repr_ns is not None:
+        import warnings
+
+        warnings.warn(
+            DeprecationWarning(
+                "The `repr_ns` argument is deprecated and will be removed in or after August 2025."
+            ),
+            stacklevel=2,
+        )
+
+    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
+
+    #  unsafe_hash takes precedence due to PEP 681.
+    if unsafe_hash is not None:
+        hash = unsafe_hash
+
+    if isinstance(on_setattr, (list, tuple)):
+        on_setattr = setters.pipe(*on_setattr)
+
+    def wrap(cls):
+        is_frozen = frozen or _has_frozen_base_class(cls)
+        is_exc = auto_exc is True and issubclass(cls, BaseException)
+        has_own_setattr = auto_detect and _has_own_attribute(
+            cls, "__setattr__"
+        )
+
+        if has_own_setattr and is_frozen:
+            msg = "Can't freeze a class with a custom __setattr__."
+            raise ValueError(msg)
+
+        builder = _ClassBuilder(
+            cls,
+            these,
+            slots,
+            is_frozen,
+            weakref_slot,
+            _determine_whether_to_implement(
+                cls,
+                getstate_setstate,
+                auto_detect,
+                ("__getstate__", "__setstate__"),
+                default=slots,
+            ),
+            auto_attribs,
+            kw_only,
+            cache_hash,
+            is_exc,
+            collect_by_mro,
+            on_setattr,
+            has_own_setattr,
+            field_transformer,
+        )
+
+        if _determine_whether_to_implement(
+            cls, repr, auto_detect, ("__repr__",)
+        ):
+            builder.add_repr(repr_ns)
+
+        if str is True:
+            builder.add_str()
+
+        eq = _determine_whether_to_implement(
+            cls, eq_, auto_detect, ("__eq__", "__ne__")
+        )
+        if not is_exc and eq is True:
+            builder.add_eq()
+        if not is_exc and _determine_whether_to_implement(
+            cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
+        ):
+            builder.add_order()
+
+        if not frozen:
+            builder.add_setattr()
+
+        nonlocal hash
+        if (
+            hash is None
+            and auto_detect is True
+            and _has_own_attribute(cls, "__hash__")
+        ):
+            hash = False
+
+        if hash is not True and hash is not False and hash is not None:
+            # Can't use `hash in` because 1 == True for example.
+            msg = "Invalid value for hash.  Must be True, False, or None."
+            raise TypeError(msg)
+
+        if hash is False or (hash is None and eq is False) or is_exc:
+            # Don't do anything. Should fall back to object's __hash__
+            # which is by id.
+            if cache_hash:
+                msg = "Invalid value for cache_hash.  To use hash caching, hashing must be either explicitly or implicitly enabled."
+                raise TypeError(msg)
+        elif hash is True or (
+            hash is None and eq is True and is_frozen is True
+        ):
+            # Build a __hash__ if told so, or if it's safe.
+            builder.add_hash()
+        else:
+            # Raise TypeError on attempts to hash.
+            if cache_hash:
+                msg = "Invalid value for cache_hash.  To use hash caching, hashing must be either explicitly or implicitly enabled."
+                raise TypeError(msg)
+            builder.make_unhashable()
+
+        if _determine_whether_to_implement(
+            cls, init, auto_detect, ("__init__",)
+        ):
+            builder.add_init()
+        else:
+            builder.add_attrs_init()
+            if cache_hash:
+                msg = "Invalid value for cache_hash.  To use hash caching, init must be True."
+                raise TypeError(msg)
+
+        if PY_3_13_PLUS and not _has_own_attribute(cls, "__replace__"):
+            builder.add_replace()
+
+        if (
+            PY_3_10_PLUS
+            and match_args
+            and not _has_own_attribute(cls, "__match_args__")
+        ):
+            builder.add_match_args()
+
+        return builder.build_class()
+
+    # maybe_cls's type depends on the usage of the decorator.  It's a class
+    # if it's used as `@attrs` but `None` if used as `@attrs()`.
+    if maybe_cls is None:
+        return wrap
+
+    return wrap(maybe_cls)
+
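+# Minimal end-to-end use of the decorator above (illustrative, not part of
+# the attrs sources):
+#
+#     import attr
+#
+#     @attr.s(auto_attribs=True, frozen=True, slots=True)
+#     class Point:
+#         x: int
+#         y: int = 0
+#
+#     p = Point(1)
+#     p                  # Point(x=1, y=0)
+#     p == Point(1, 0)   # True -- generated __eq__ compares field by field
+#     hash(p)            # works: frozen + eq implies a generated __hash__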
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+def _has_frozen_base_class(cls):
+    """
+    Check whether *cls* has a frozen ancestor by looking at its
+    __setattr__.
+    """
+    return cls.__setattr__ is _frozen_setattrs
+
+
+def _generate_unique_filename(cls: type, func_name: str) -> str:
+    """
+    Create a "filename" suitable for a function being generated.
+    """
+    return (
+        f"<attrs generated {func_name} {cls.__module__}."
+        f"{getattr(cls, '__qualname__', cls.__name__)}>"
+    )
+
+
+def _make_hash_script(
+    cls: type, attrs: list[Attribute], frozen: bool, cache_hash: bool
+) -> tuple[str, dict]:
+    attrs = tuple(
+        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+    )
+
+    tab = "        "
+
+    type_hash = hash(_generate_unique_filename(cls, "hash"))
+    # If eq is custom generated, we need to include the functions in globs
+    globs = {}
+
+    hash_def = "def __hash__(self"
+    hash_func = "hash(("
+    closing_braces = "))"
+    if not cache_hash:
+        hash_def += "):"
+    else:
+        hash_def += ", *"
+
+        hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
+        hash_func = "_cache_wrapper(" + hash_func
+        closing_braces += ")"
+
+    method_lines = [hash_def]
+
+    def append_hash_computation_lines(prefix, indent):
+        """
+        Generate the code for actually computing the hash code.
+        Below, this will either be returned directly or used to compute a
+        value which is then cached, depending on the value of cache_hash.
+        """
+
+        method_lines.extend(
+            [
+                indent + prefix + hash_func,
+                indent + f"        {type_hash},",
+            ]
+        )
+
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = f"_{a.name}_key"
+                globs[cmp_name] = a.eq_key
+                method_lines.append(
+                    indent + f"        {cmp_name}(self.{a.name}),"
+                )
+            else:
+                method_lines.append(indent + f"        self.{a.name},")
+
+        method_lines.append(indent + "    " + closing_braces)
+
+    if cache_hash:
+        method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:")
+        if frozen:
+            append_hash_computation_lines(
+                f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2
+            )
+            method_lines.append(tab * 2 + ")")  # close __setattr__
+        else:
+            append_hash_computation_lines(
+                f"self.{_HASH_CACHE_FIELD} = ", tab * 2
+            )
+        method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}")
+    else:
+        append_hash_computation_lines("return ", tab)
+
+    script = "\n".join(method_lines)
+    return script, globs
+
+
+def _add_hash(cls: type, attrs: list[Attribute]):
+    """
+    Add a hash method to *cls*.
+    """
+    script, globs = _make_hash_script(
+        cls, attrs, frozen=False, cache_hash=False
+    )
+    _compile_and_eval(
+        script, globs, filename=_generate_unique_filename(cls, "__hash__")
+    )
+    cls.__hash__ = globs["__hash__"]
+    return cls
+
+
+def __ne__(self, other):
+    """
+    Check equality and either forward a NotImplemented or
+    return the result negated.
+    """
+    result = self.__eq__(other)
+    if result is NotImplemented:
+        return NotImplemented
+
+    return not result
+
+
+def _make_eq_script(attrs: list) -> tuple[str, dict]:
+    """
+    Create __eq__ method for *cls* with *attrs*.
+    """
+    attrs = [a for a in attrs if a.eq]
+
+    lines = [
+        "def __eq__(self, other):",
+        "    if other.__class__ is not self.__class__:",
+        "        return NotImplemented",
+    ]
+
+    globs = {}
+    if attrs:
+        lines.append("    return  (")
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = f"_{a.name}_key"
+                # Add the key function to the global namespace
+                # of the evaluated function.
+                globs[cmp_name] = a.eq_key
+                lines.append(
+                    f"        {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})"
+                )
+            else:
+                lines.append(f"        self.{a.name} == other.{a.name}")
+            if a is not attrs[-1]:
+                lines[-1] = f"{lines[-1]} and"
+        lines.append("    )")
+    else:
+        lines.append("    return True")
+
+    script = "\n".join(lines)
+
+    return script, globs
+
+
+def _make_order(cls, attrs):
+    """
+    Create ordering methods for *cls* with *attrs*.
+    """
+    attrs = [a for a in attrs if a.order]
+
+    def attrs_to_tuple(obj):
+        """
+        Save us some typing.
+        """
+        return tuple(
+            key(value) if key else value
+            for value, key in (
+                (getattr(obj, a.name), a.order_key) for a in attrs
+            )
+        )
+
+    def __lt__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __le__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __gt__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __ge__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+        return NotImplemented
+
+    return __lt__, __le__, __gt__, __ge__
+
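+# The four methods above compare instances as tuples of their order-enabled
+# attributes and refuse cross-class comparisons (illustrative, not part of
+# the attrs sources):
+#
+#     import attr
+#
+#     @attr.s(order=True)
+#     class C:
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     C(1, 2) < C(1, 3)  # True, because (1, 2) < (1, 3)
+#     C(1, 2) < (1, 3)   # TypeError: both sides return NotImplemented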
+
+def _add_eq(cls, attrs=None):
+    """
+    Add equality methods to *cls* with *attrs*.
+    """
+    if attrs is None:
+        attrs = cls.__attrs_attrs__
+
+    script, globs = _make_eq_script(attrs)
+    _compile_and_eval(
+        script, globs, filename=_generate_unique_filename(cls, "__eq__")
+    )
+    cls.__eq__ = globs["__eq__"]
+    cls.__ne__ = __ne__
+
+    return cls
+
+
+def _make_repr_script(attrs, ns) -> tuple[str, dict]:
+    """
+    Create the source and globs for a __repr__ and return it.
+    """
+    # Figure out which attributes to include, and which function to use to
+    # format them. The a.repr value can be either bool or a custom
+    # callable.
+    attr_names_with_reprs = tuple(
+        (a.name, (repr if a.repr is True else a.repr), a.init)
+        for a in attrs
+        if a.repr is not False
+    )
+    globs = {
+        name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr
+    }
+    globs["_compat"] = _compat
+    globs["AttributeError"] = AttributeError
+    globs["NOTHING"] = NOTHING
+    attribute_fragments = []
+    for name, r, i in attr_names_with_reprs:
+        accessor = (
+            "self." + name if i else 'getattr(self, "' + name + '", NOTHING)'
+        )
+        fragment = (
+            "%s={%s!r}" % (name, accessor)
+            if r == repr
+            else "%s={%s_repr(%s)}" % (name, name, accessor)
+        )
+        attribute_fragments.append(fragment)
+    repr_fragment = ", ".join(attribute_fragments)
+
+    if ns is None:
+        cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
+    else:
+        cls_name_fragment = ns + ".{self.__class__.__name__}"
+
+    lines = [
+        "def __repr__(self):",
+        "  try:",
+        "    already_repring = _compat.repr_context.already_repring",
+        "  except AttributeError:",
+        "    already_repring = {id(self),}",
+        "    _compat.repr_context.already_repring = already_repring",
+        "  else:",
+        "    if id(self) in already_repring:",
+        "      return '...'",
+        "    else:",
+        "      already_repring.add(id(self))",
+        "  try:",
+        f"    return f'{cls_name_fragment}({repr_fragment})'",
+        "  finally:",
+        "    already_repring.remove(id(self))",
+    ]
+
+    return "\n".join(lines), globs
+
+
+def _add_repr(cls, ns=None, attrs=None):
+    """
+    Add a repr method to *cls*.
+    """
+    if attrs is None:
+        attrs = cls.__attrs_attrs__
+
+    script, globs = _make_repr_script(attrs, ns)
+    _compile_and_eval(
+        script, globs, filename=_generate_unique_filename(cls, "__repr__")
+    )
+    cls.__repr__ = globs["__repr__"]
+    return cls
+
+
+def fields(cls):
+    """
+    Return the tuple of *attrs* attributes for a class.
+
+    The tuple also allows accessing the fields by their names (see below for
+    examples).
+
+    Args:
+        cls (type): Class to introspect.
+
+    Raises:
+        TypeError: If *cls* is not a class.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    Returns:
+        tuple (with name accessors) of `attrs.Attribute`
+
+    .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
+       by name.
+    .. versionchanged:: 23.1.0 Add support for generic classes.
+    """
+    generic_base = get_generic_base(cls)
+
+    if generic_base is None and not isinstance(cls, type):
+        msg = "Passed object must be a class."
+        raise TypeError(msg)
+
+    attrs = getattr(cls, "__attrs_attrs__", None)
+
+    if attrs is None:
+        if generic_base is not None:
+            attrs = getattr(generic_base, "__attrs_attrs__", None)
+            if attrs is not None:
+                # Even though this is global state, stick it on here to speed
+                # it up. We rely on `cls` being cached for this to be
+                # efficient.
+                cls.__attrs_attrs__ = attrs
+                return attrs
+        msg = f"{cls!r} is not an attrs-decorated class."
+        raise NotAnAttrsClassError(msg)
+
+    return attrs
+
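+# Typical introspection with fields() (illustrative, not part of the attrs
+# sources): the returned tuple also exposes the attributes by name.
+#
+#     import attr
+#
+#     @attr.s
+#     class C:
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     fs = attr.fields(C)
+#     fs[0].name        # "x"
+#     fs.y.name         # "y" -- name-based access on the tuple subclass
+#     attr.fields(int)  # raises NotAnAttrsClassError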
+
+def fields_dict(cls):
+    """
+    Return an ordered dictionary of *attrs* attributes for a class, whose keys
+    are the attribute names.
+
+    Args:
+        cls (type): Class to introspect.
+
+    Raises:
+        TypeError: If *cls* is not a class.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    Returns:
+        dict[str, attrs.Attribute]: Dict of attribute name to definition
+
+    .. versionadded:: 18.1.0
+    """
+    if not isinstance(cls, type):
+        msg = "Passed object must be a class."
+        raise TypeError(msg)
+    attrs = getattr(cls, "__attrs_attrs__", None)
+    if attrs is None:
+        msg = f"{cls!r} is not an attrs-decorated class."
+        raise NotAnAttrsClassError(msg)
+    return {a.name: a for a in attrs}
+
+
+def validate(inst):
+    """
+    Validate all attributes on *inst* that have a validator.
+
+    Lets all exceptions through.
+
+    Args:
+        inst: Instance of a class with *attrs* attributes.
+    """
+    if _config._run_validators is False:
+        return
+
+    for a in fields(inst.__class__):
+        v = a.validator
+        if v is not None:
+            v(inst, a, getattr(inst, a.name))
+
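+# Re-running validators on an existing instance (illustrative, not part of
+# the attrs sources):
+#
+#     import attr
+#     from attr.validators import instance_of
+#
+#     @attr.s
+#     class C:
+#         x = attr.ib(validator=instance_of(int))
+#
+#     c = C(1)
+#     c.x = "oops"      # plain attr.s adds no setattr hooks, so this succeeds
+#     attr.validate(c)  # raises TypeError from the instance_of validator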
+
+def _is_slot_attr(a_name, base_attr_map):
+    """
+    Check if the attribute name comes from a slot class.
+    """
+    cls = base_attr_map.get(a_name)
+    return cls and "__slots__" in cls.__dict__
+
+
+def _make_init_script(
+    cls,
+    attrs,
+    pre_init,
+    pre_init_has_args,
+    post_init,
+    frozen,
+    slots,
+    cache_hash,
+    base_attr_map,
+    is_exc,
+    cls_on_setattr,
+    attrs_init,
+) -> tuple[str, dict, dict]:
+    has_cls_on_setattr = (
+        cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
+    )
+
+    if frozen and has_cls_on_setattr:
+        msg = "Frozen classes can't use on_setattr."
+        raise ValueError(msg)
+
+    needs_cached_setattr = cache_hash or frozen
+    filtered_attrs = []
+    attr_dict = {}
+    for a in attrs:
+        if not a.init and a.default is NOTHING:
+            continue
+
+        filtered_attrs.append(a)
+        attr_dict[a.name] = a
+
+        if a.on_setattr is not None:
+            if frozen is True:
+                msg = "Frozen classes can't use on_setattr."
+                raise ValueError(msg)
+
+            needs_cached_setattr = True
+        elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
+            needs_cached_setattr = True
+
+    script, globs, annotations = _attrs_to_init_script(
+        filtered_attrs,
+        frozen,
+        slots,
+        pre_init,
+        pre_init_has_args,
+        post_init,
+        cache_hash,
+        base_attr_map,
+        is_exc,
+        needs_cached_setattr,
+        has_cls_on_setattr,
+        "__attrs_init__" if attrs_init else "__init__",
+    )
+    if cls.__module__ in sys.modules:
+        # This makes typing.get_type_hints(CLS.__init__) resolve string types.
+        globs.update(sys.modules[cls.__module__].__dict__)
+
+    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+    if needs_cached_setattr:
+        # Save the lookup overhead in __init__ if we need to circumvent
+        # setattr hooks.
+        globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__
+
+    return script, globs, annotations
+
+
+def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str:
+    """
+    Use the cached object.setattr to set *attr_name* to *value_var*.
+    """
+    return f"_setattr('{attr_name}', {value_var})"
+
+
+def _setattr_with_converter(
+    attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
+    """
+    Use the cached object.setattr to set *attr_name* to *value_var*, but run
+    its converter first.
+    """
+    return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})"
+
+
+def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str:
+    """
+    Unless *attr_name* has an on_setattr hook, use normal assignment.
+    Otherwise delegate to _setattr.
+    """
+    if has_on_setattr:
+        return _setattr(attr_name, value, True)
+
+    return f"self.{attr_name} = {value}"
+
+
+def _assign_with_converter(
+    attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
+    """
+    Unless *attr_name* has an on_setattr hook, use normal assignment after
+    conversion. Otherwise delegate to _setattr_with_converter.
+    """
+    if has_on_setattr:
+        return _setattr_with_converter(attr_name, value_var, True, converter)
+
+    return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}"
+
+
+def _determine_setters(
+    frozen: bool, slots: bool, base_attr_map: dict[str, type]
+):
+    """
+    Determine the correct setter functions based on whether a class is frozen
+    and/or slotted.
+    """
+    if frozen is True:
+        if slots is True:
+            return (), _setattr, _setattr_with_converter
+
+        # Dict frozen classes assign directly to __dict__.
+        # But only if the attribute doesn't come from an ancestor slot
+        # class.
+        # Note _inst_dict will be used again below if cache_hash is True
+
+        def fmt_setter(
+            attr_name: str, value_var: str, has_on_setattr: bool
+        ) -> str:
+            if _is_slot_attr(attr_name, base_attr_map):
+                return _setattr(attr_name, value_var, has_on_setattr)
+
+            return f"_inst_dict['{attr_name}'] = {value_var}"
+
+        def fmt_setter_with_converter(
+            attr_name: str,
+            value_var: str,
+            has_on_setattr: bool,
+            converter: Converter,
+        ) -> str:
+            if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+                return _setattr_with_converter(
+                    attr_name, value_var, has_on_setattr, converter
+                )
+
+            return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}"
+
+        return (
+            ("_inst_dict = self.__dict__",),
+            fmt_setter,
+            fmt_setter_with_converter,
+        )
+
+    # Not frozen -- we can just assign directly.
+    return (), _assign, _assign_with_converter
+
+
+def _attrs_to_init_script(
+    attrs: list[Attribute],
+    is_frozen: bool,
+    is_slotted: bool,
+    call_pre_init: bool,
+    pre_init_has_args: bool,
+    call_post_init: bool,
+    does_cache_hash: bool,
+    base_attr_map: dict[str, type],
+    is_exc: bool,
+    needs_cached_setattr: bool,
+    has_cls_on_setattr: bool,
+    method_name: str,
+) -> tuple[str, dict, dict]:
+    """
+    Return a script of an initializer for *attrs*, a dict of globals, and
+    annotations for the initializer.
+
+    The globals are required by the generated script.
+    """
+    lines = ["self.__attrs_pre_init__()"] if call_pre_init else []
+
+    if needs_cached_setattr:
+        lines.append(
+            # Circumvent the __setattr__ descriptor to save one lookup per
+            # assignment. Note _setattr will be used again below if
+            # does_cache_hash is True.
+            "_setattr = _cached_setattr_get(self)"
+        )
+
+    extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters(
+        is_frozen, is_slotted, base_attr_map
+    )
+    lines.extend(extra_lines)
+
+    args = []
+    kw_only_args = []
+    attrs_to_validate = []
+
+    # This is a dictionary of names to validator and converter callables.
+    # Injecting this into __init__ globals lets us avoid lookups.
+    names_for_globals = {}
+    annotations = {"return": None}
+
+    for a in attrs:
+        if a.validator:
+            attrs_to_validate.append(a)
+
+        attr_name = a.name
+        has_on_setattr = a.on_setattr is not None or (
+            a.on_setattr is not setters.NO_OP and has_cls_on_setattr
+        )
+        # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not
+        # explicitly provided
+        arg_name = a.alias
+
+        has_factory = isinstance(a.default, Factory)
+        maybe_self = "self" if has_factory and a.default.takes_self else ""
+
+        if a.converter is not None and not isinstance(a.converter, Converter):
+            converter = Converter(a.converter)
+        else:
+            converter = a.converter
+
+        if a.init is False:
+            if has_factory:
+                init_factory_name = _INIT_FACTORY_PAT % (a.name,)
+                if converter is not None:
+                    lines.append(
+                        fmt_setter_with_converter(
+                            attr_name,
+                            init_factory_name + f"({maybe_self})",
+                            has_on_setattr,
+                            converter,
+                        )
+                    )
+                    names_for_globals[converter._get_global_name(a.name)] = (
+                        converter.converter
+                    )
+                else:
+                    lines.append(
+                        fmt_setter(
+                            attr_name,
+                            init_factory_name + f"({maybe_self})",
+                            has_on_setattr,
+                        )
+                    )
+                names_for_globals[init_factory_name] = a.default.factory
+            elif converter is not None:
+                lines.append(
+                    fmt_setter_with_converter(
+                        attr_name,
+                        f"attr_dict['{attr_name}'].default",
+                        has_on_setattr,
+                        converter,
+                    )
+                )
+                names_for_globals[converter._get_global_name(a.name)] = (
+                    converter.converter
+                )
+            else:
+                lines.append(
+                    fmt_setter(
+                        attr_name,
+                        f"attr_dict['{attr_name}'].default",
+                        has_on_setattr,
+                    )
+                )
+        elif a.default is not NOTHING and not has_factory:
+            arg = f"{arg_name}=attr_dict['{attr_name}'].default"
+            if a.kw_only:
+                kw_only_args.append(arg)
+            else:
+                args.append(arg)
+
+            if converter is not None:
+                lines.append(
+                    fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr, converter
+                    )
+                )
+                names_for_globals[converter._get_global_name(a.name)] = (
+                    converter.converter
+                )
+            else:
+                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+        elif has_factory:
+            arg = f"{arg_name}=NOTHING"
+            if a.kw_only:
+                kw_only_args.append(arg)
+            else:
+                args.append(arg)
+            lines.append(f"if {arg_name} is not NOTHING:")
+
+            init_factory_name = _INIT_FACTORY_PAT % (a.name,)
+            if converter is not None:
+                lines.append(
+                    "    "
+                    + fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr, converter
+                    )
+                )
+                lines.append("else:")
+                lines.append(
+                    "    "
+                    + fmt_setter_with_converter(
+                        attr_name,
+                        init_factory_name + "(" + maybe_self + ")",
+                        has_on_setattr,
+                        converter,
+                    )
+                )
+                names_for_globals[converter._get_global_name(a.name)] = (
+                    converter.converter
+                )
+            else:
+                lines.append(
+                    "    " + fmt_setter(attr_name, arg_name, has_on_setattr)
+                )
+                lines.append("else:")
+                lines.append(
+                    "    "
+                    + fmt_setter(
+                        attr_name,
+                        init_factory_name + "(" + maybe_self + ")",
+                        has_on_setattr,
+                    )
+                )
+            names_for_globals[init_factory_name] = a.default.factory
+        else:
+            if a.kw_only:
+                kw_only_args.append(arg_name)
+            else:
+                args.append(arg_name)
+
+            if converter is not None:
+                lines.append(
+                    fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr, converter
+                    )
+                )
+                names_for_globals[converter._get_global_name(a.name)] = (
+                    converter.converter
+                )
+            else:
+                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+        if a.init is True:
+            if a.type is not None and converter is None:
+                annotations[arg_name] = a.type
+            elif converter is not None and converter._first_param_type:
+                # Use the type from the converter if present.
+                annotations[arg_name] = converter._first_param_type
+
+    if attrs_to_validate:  # we can skip this if there are no validators.
+        names_for_globals["_config"] = _config
+        lines.append("if _config._run_validators is True:")
+        for a in attrs_to_validate:
+            val_name = "__attr_validator_" + a.name
+            attr_name = "__attr_" + a.name
+            lines.append(f"    {val_name}(self, {attr_name}, self.{a.name})")
+            names_for_globals[val_name] = a.validator
+            names_for_globals[attr_name] = a
+
+    if call_post_init:
+        lines.append("self.__attrs_post_init__()")
+
+    # Because this is set only after __attrs_post_init__ is called, a crash
+    # will result if post-init tries to access the hash code.  This seemed
+    # preferable to setting this beforehand, in which case alteration to field
+    # values during post-init combined with post-init accessing the hash code
+    # would result in silent bugs.
+    if does_cache_hash:
+        if is_frozen:
+            if is_slotted:
+                init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)"
+            else:
+                init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None"
+        else:
+            init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None"
+        lines.append(init_hash_cache)
+
+    # For exceptions we rely on BaseException.__init__ for proper
+    # initialization.
+    if is_exc:
+        vals = ",".join(f"self.{a.name}" for a in attrs if a.init)
+
+        lines.append(f"BaseException.__init__(self, {vals})")
+
+    args = ", ".join(args)
+    pre_init_args = args
+    if kw_only_args:
+        # leading comma & kw_only args
+        args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}"
+        pre_init_kw_only_args = ", ".join(
+            [
+                f"{kw_arg_name}={kw_arg_name}"
+                # We need to remove the defaults from the kw_only_args.
+                for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args)
+            ]
+        )
+        pre_init_args += ", " if pre_init_args else ""
+        pre_init_args += pre_init_kw_only_args
+
+    if call_pre_init and pre_init_has_args:
+        # If pre init method has arguments, pass same arguments as `__init__`.
+        lines[0] = f"self.__attrs_pre_init__({pre_init_args})"
+
+    # Python <3.12 doesn't allow backslashes in f-strings.
+    NL = "\n    "
+    return (
+        f"""def {method_name}(self, {args}):
+    {NL.join(lines) if lines else "pass"}
+""",
+        names_for_globals,
+        annotations,
+    )
+
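+# Rough illustration (not verbatim output, just the general shape): for a
+# simple mutable dict class such as
+#
+#   @attr.s
+#   class C:
+#       x = attr.ib()
+#       y = attr.ib(default=0)
+#
+# the function above assembles source roughly like
+#
+#   def __init__(self, x, y=attr_dict['y'].default):
+#       self.x = x
+#       self.y = y
+#
+# which is then compiled and attached to the class.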
+
+def _default_init_alias_for(name: str) -> str:
+    """
+    The default __init__ parameter name for a field.
+
+    This performs private-name adjustment via leading-underscore stripping,
+    and is the default value of Attribute.alias if not provided.
+    """
+
+    return name.lstrip("_")
+
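+# Illustrative behavior of the helper above: every leading underscore is
+# stripped, nothing else is changed.
+#
+#   >>> _default_init_alias_for("_private")
+#   'private'
+#   >>> _default_init_alias_for("__very_private")
+#   'very_private'
+#   >>> _default_init_alias_for("plain")
+#   'plain'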
+
+class Attribute:
+    """
+    *Read-only* representation of an attribute.
+
+    .. warning::
+
+       You should never instantiate this class yourself.
+
+    The class has *all* arguments of `attr.ib` (except for ``factory`` which is
+    only syntactic sugar for ``default=Factory(...)``) plus the following:
+
+    - ``name`` (`str`): The name of the attribute.
+    - ``alias`` (`str`): The __init__ parameter name of the attribute, after
+      any explicit overrides and default private-attribute-name handling.
+    - ``inherited`` (`bool`): Whether or not that attribute has been inherited
+      from a base class.
+    - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The
+      callables that are used for comparing and ordering objects by this
+      attribute, respectively. These are set by passing a callable to
+      `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also
+      :ref:`comparison customization <custom-comparison>`.
+
+    Instances of this class are frequently used for introspection purposes
+    like:
+
+    - `fields` returns a tuple of them.
+    - Validators get them passed as the first argument.
+    - The :ref:`field transformer <transform-fields>` hook receives a list of
+      them.
+    - The ``alias`` property exposes the __init__ parameter name of the field,
+      with any overrides and default private-attribute handling applied.
+
+
+    .. versionadded:: 20.1.0 *inherited*
+    .. versionadded:: 20.1.0 *on_setattr*
+    .. versionchanged:: 20.2.0 *inherited* is not taken into account for
+        equality checks and hashing anymore.
+    .. versionadded:: 21.1.0 *eq_key* and *order_key*
+    .. versionadded:: 22.2.0 *alias*
+
+    For the full version history of the fields, see `attr.ib`.
+    """
+
+    # These slots must NOT be reordered because we use them later for
+    # instantiation.
+    __slots__ = (  # noqa: RUF023
+        "name",
+        "default",
+        "validator",
+        "repr",
+        "eq",
+        "eq_key",
+        "order",
+        "order_key",
+        "hash",
+        "init",
+        "metadata",
+        "type",
+        "converter",
+        "kw_only",
+        "inherited",
+        "on_setattr",
+        "alias",
+    )
+
+    def __init__(
+        self,
+        name,
+        default,
+        validator,
+        repr,
+        cmp,  # XXX: unused, remove along with other cmp code.
+        hash,
+        init,
+        inherited,
+        metadata=None,
+        type=None,
+        converter=None,
+        kw_only=False,
+        eq=None,
+        eq_key=None,
+        order=None,
+        order_key=None,
+        on_setattr=None,
+        alias=None,
+    ):
+        eq, eq_key, order, order_key = _determine_attrib_eq_order(
+            cmp, eq_key or eq, order_key or order, True
+        )
+
+        # Cache this descriptor here to speed things up later.
+        bound_setattr = _OBJ_SETATTR.__get__(self)
+
+        # Despite the big red warning, people *do* instantiate `Attribute`
+        # themselves.
+        bound_setattr("name", name)
+        bound_setattr("default", default)
+        bound_setattr("validator", validator)
+        bound_setattr("repr", repr)
+        bound_setattr("eq", eq)
+        bound_setattr("eq_key", eq_key)
+        bound_setattr("order", order)
+        bound_setattr("order_key", order_key)
+        bound_setattr("hash", hash)
+        bound_setattr("init", init)
+        bound_setattr("converter", converter)
+        bound_setattr(
+            "metadata",
+            (
+                types.MappingProxyType(dict(metadata))  # Shallow copy
+                if metadata
+                else _EMPTY_METADATA_SINGLETON
+            ),
+        )
+        bound_setattr("type", type)
+        bound_setattr("kw_only", kw_only)
+        bound_setattr("inherited", inherited)
+        bound_setattr("on_setattr", on_setattr)
+        bound_setattr("alias", alias)
+
+    def __setattr__(self, name, value):
+        raise FrozenInstanceError
+
+    @classmethod
+    def from_counting_attr(cls, name: str, ca: _CountingAttr, type=None):
+        # type holds the annotated value. deal with conflicts:
+        if type is None:
+            type = ca.type
+        elif ca.type is not None:
+            msg = f"Type annotation and type argument cannot both be present for '{name}'."
+            raise ValueError(msg)
+        return cls(
+            name,
+            ca._default,
+            ca._validator,
+            ca.repr,
+            None,
+            ca.hash,
+            ca.init,
+            False,
+            ca.metadata,
+            type,
+            ca.converter,
+            ca.kw_only,
+            ca.eq,
+            ca.eq_key,
+            ca.order,
+            ca.order_key,
+            ca.on_setattr,
+            ca.alias,
+        )
+
+    # Don't use attrs.evolve since fields(Attribute) doesn't work
+    def evolve(self, **changes):
+        """
+        Copy *self* and apply *changes*.
+
+        This works similarly to `attrs.evolve` but that function does not work
+        with :class:`attrs.Attribute`.
+
+        It is mainly meant to be used for `transform-fields`.
+
+        .. versionadded:: 20.3.0
+        """
+        new = copy.copy(self)
+
+        new._setattrs(changes.items())
+
+        return new
+
+    # Don't use _add_pickle since fields(Attribute) doesn't work
+    def __getstate__(self):
+        """
+        Play nice with pickle.
+        """
+        return tuple(
+            getattr(self, name) if name != "metadata" else dict(self.metadata)
+            for name in self.__slots__
+        )
+
+    def __setstate__(self, state):
+        """
+        Play nice with pickle.
+        """
+        self._setattrs(zip(self.__slots__, state))
+
+    def _setattrs(self, name_values_pairs):
+        bound_setattr = _OBJ_SETATTR.__get__(self)
+        for name, value in name_values_pairs:
+            if name != "metadata":
+                bound_setattr(name, value)
+            else:
+                bound_setattr(
+                    name,
+                    (
+                        types.MappingProxyType(dict(value))
+                        if value
+                        else _EMPTY_METADATA_SINGLETON
+                    ),
+                )
+
+
+_a = [
+    Attribute(
+        name=name,
+        default=NOTHING,
+        validator=None,
+        repr=True,
+        cmp=None,
+        eq=True,
+        order=False,
+        hash=(name != "metadata"),
+        init=True,
+        inherited=False,
+        alias=_default_init_alias_for(name),
+    )
+    for name in Attribute.__slots__
+]
+
+Attribute = _add_hash(
+    _add_eq(
+        _add_repr(Attribute, attrs=_a),
+        attrs=[a for a in _a if a.name != "inherited"],
+    ),
+    attrs=[a for a in _a if a.hash and a.name != "inherited"],
+)
+
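+# Rough usage sketch (assumes the public ``attr`` API): ``Attribute``
+# instances are usually obtained by introspection rather than created by hand.
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class C:
+#   ...     x = attr.ib(default=1)
+#   >>> a = attr.fields(C).x
+#   >>> (a.name, a.default, a.alias)
+#   ('x', 1, 'x')
+#   >>> a.evolve(default=2).default  # copy with one field changed
+#   2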
+
+class _CountingAttr:
+    """
+    Intermediate representation of attributes that uses a counter to preserve
+    the order in which the attributes have been defined.
+
+    *Internal* data structure of the attrs library.  Running into it is most
+    likely the result of a bug like a forgotten `@attr.s` decorator.
+    """
+
+    __slots__ = (
+        "_default",
+        "_validator",
+        "alias",
+        "converter",
+        "counter",
+        "eq",
+        "eq_key",
+        "hash",
+        "init",
+        "kw_only",
+        "metadata",
+        "on_setattr",
+        "order",
+        "order_key",
+        "repr",
+        "type",
+    )
+    __attrs_attrs__ = (
+        *tuple(
+            Attribute(
+                name=name,
+                alias=_default_init_alias_for(name),
+                default=NOTHING,
+                validator=None,
+                repr=True,
+                cmp=None,
+                hash=True,
+                init=True,
+                kw_only=False,
+                eq=True,
+                eq_key=None,
+                order=False,
+                order_key=None,
+                inherited=False,
+                on_setattr=None,
+            )
+            for name in (
+                "counter",
+                "_default",
+                "repr",
+                "eq",
+                "order",
+                "hash",
+                "init",
+                "on_setattr",
+                "alias",
+            )
+        ),
+        Attribute(
+            name="metadata",
+            alias="metadata",
+            default=None,
+            validator=None,
+            repr=True,
+            cmp=None,
+            hash=False,
+            init=True,
+            kw_only=False,
+            eq=True,
+            eq_key=None,
+            order=False,
+            order_key=None,
+            inherited=False,
+            on_setattr=None,
+        ),
+    )
+    cls_counter = 0
+
+    def __init__(
+        self,
+        default,
+        validator,
+        repr,
+        cmp,
+        hash,
+        init,
+        converter,
+        metadata,
+        type,
+        kw_only,
+        eq,
+        eq_key,
+        order,
+        order_key,
+        on_setattr,
+        alias,
+    ):
+        _CountingAttr.cls_counter += 1
+        self.counter = _CountingAttr.cls_counter
+        self._default = default
+        self._validator = validator
+        self.converter = converter
+        self.repr = repr
+        self.eq = eq
+        self.eq_key = eq_key
+        self.order = order
+        self.order_key = order_key
+        self.hash = hash
+        self.init = init
+        self.metadata = metadata
+        self.type = type
+        self.kw_only = kw_only
+        self.on_setattr = on_setattr
+        self.alias = alias
+
+    def validator(self, meth):
+        """
+        Decorator that adds *meth* to the list of validators.
+
+        Returns *meth* unchanged.
+
+        .. versionadded:: 17.1.0
+        """
+        if self._validator is None:
+            self._validator = meth
+        else:
+            self._validator = and_(self._validator, meth)
+        return meth
+
+    def default(self, meth):
+        """
+        Decorator that allows setting the default for an attribute.
+
+        Returns *meth* unchanged.
+
+        Raises:
+            DefaultAlreadySetError: If default has been set before.
+
+        .. versionadded:: 17.1.0
+        """
+        if self._default is not NOTHING:
+            raise DefaultAlreadySetError
+
+        self._default = Factory(meth, takes_self=True)
+
+        return meth
+
+
+_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
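+# Rough usage sketch (assumes the public ``attr`` API): the ``validator`` and
+# ``default`` decorators above are what make the following pattern work.
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class C:
+#   ...     x = attr.ib()
+#   ...     y = attr.ib()
+#   ...
+#   ...     @x.validator
+#   ...     def _check_x(self, attribute, value):
+#   ...         if value < 0:
+#   ...             raise ValueError("x must be non-negative")
+#   ...
+#   ...     @y.default
+#   ...     def _default_y(self):
+#   ...         return self.x + 1
+#   >>> C(x=1).y
+#   2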
+
+class Factory:
+    """
+    Stores a factory callable.
+
+    If passed as the default value to `attrs.field`, the factory is used to
+    generate a new value.
+
+    Args:
+        factory (typing.Callable):
+            A callable that takes either none or exactly one mandatory
+            positional argument depending on *takes_self*.
+
+        takes_self (bool):
+            Pass the partially initialized instance that is being initialized
+            as a positional argument.
+
+    .. versionadded:: 17.1.0  *takes_self*
+    """
+
+    __slots__ = ("factory", "takes_self")
+
+    def __init__(self, factory, takes_self=False):
+        self.factory = factory
+        self.takes_self = takes_self
+
+    def __getstate__(self):
+        """
+        Play nice with pickle.
+        """
+        return tuple(getattr(self, name) for name in self.__slots__)
+
+    def __setstate__(self, state):
+        """
+        Play nice with pickle.
+        """
+        for name, value in zip(self.__slots__, state):
+            setattr(self, name, value)
+
+
+_f = [
+    Attribute(
+        name=name,
+        default=NOTHING,
+        validator=None,
+        repr=True,
+        cmp=None,
+        eq=True,
+        order=False,
+        hash=True,
+        init=True,
+        inherited=False,
+    )
+    for name in Factory.__slots__
+]
+
+Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
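+# Rough usage sketch (assumes the public ``attr`` API):
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class C:
+#   ...     x = attr.ib(default=attr.Factory(list))
+#   ...     y = attr.ib(
+#   ...         default=attr.Factory(lambda self: len(self.x), takes_self=True)
+#   ...     )
+#   >>> C().x, C().y
+#   ([], 0)
+#   >>> C(x=[1, 2]).y
+#   2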
+
+class Converter:
+    """
+    Stores a converter callable.
+
+    Allows for the wrapped converter to take additional arguments. The
+    arguments are passed in the order they are documented.
+
+    Args:
+        converter (Callable): A callable that converts the passed value.
+
+        takes_self (bool):
+            Pass the partially initialized instance that is being initialized
+            as a positional argument. (default: `False`)
+
+        takes_field (bool):
+            Pass the field definition (an :class:`Attribute`) into the
+            converter as a positional argument. (default: `False`)
+
+    .. versionadded:: 24.1.0
+    """
+
+    __slots__ = (
+        "__call__",
+        "_first_param_type",
+        "_global_name",
+        "converter",
+        "takes_field",
+        "takes_self",
+    )
+
+    def __init__(self, converter, *, takes_self=False, takes_field=False):
+        self.converter = converter
+        self.takes_self = takes_self
+        self.takes_field = takes_field
+
+        ex = _AnnotationExtractor(converter)
+        self._first_param_type = ex.get_first_param_type()
+
+        if not (self.takes_self or self.takes_field):
+            self.__call__ = lambda value, _, __: self.converter(value)
+        elif self.takes_self and not self.takes_field:
+            self.__call__ = lambda value, instance, __: self.converter(
+                value, instance
+            )
+        elif not self.takes_self and self.takes_field:
+            self.__call__ = lambda value, __, field: self.converter(
+                value, field
+            )
+        else:
+            self.__call__ = lambda value, instance, field: self.converter(
+                value, instance, field
+            )
+
+        rt = ex.get_return_type()
+        if rt is not None:
+            self.__call__.__annotations__["return"] = rt
+
+    @staticmethod
+    def _get_global_name(attr_name: str) -> str:
+        """
+        Return the name that a converter for an attribute name *attr_name*
+        would have.
+        """
+        return f"__attr_converter_{attr_name}"
+
+    def _fmt_converter_call(self, attr_name: str, value_var: str) -> str:
+        """
+        Return a string that calls the converter for an attribute name
+        *attr_name* and the value in variable named *value_var* according to
+        `self.takes_self` and `self.takes_field`.
+        """
+        if not (self.takes_self or self.takes_field):
+            return f"{self._get_global_name(attr_name)}({value_var})"
+
+        if self.takes_self and self.takes_field:
+            return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])"
+
+        if self.takes_self:
+            return f"{self._get_global_name(attr_name)}({value_var}, self)"
+
+        return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])"
+
+    def __getstate__(self):
+        """
+        Return a dict containing only converter, takes_self, and takes_field
+        -- the rest gets computed when loading.
+        """
+        return {
+            "converter": self.converter,
+            "takes_self": self.takes_self,
+            "takes_field": self.takes_field,
+        }
+
+    def __setstate__(self, state):
+        """
+        Load instance from state.
+        """
+        self.__init__(**state)
+
+
+_f = [
+    Attribute(
+        name=name,
+        default=NOTHING,
+        validator=None,
+        repr=True,
+        cmp=None,
+        eq=True,
+        order=False,
+        hash=True,
+        init=True,
+        inherited=False,
+    )
+    for name in ("converter", "takes_self", "takes_field")
+]
+
+Converter = _add_hash(
+    _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f
+)
+
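+# Rough usage sketch (assumes the public ``attrs`` API): a plain callable is
+# enough for simple conversions; ``Converter`` is only needed when the
+# converter also wants the instance and/or the field definition.
+#
+#   >>> from attrs import Converter, define, field
+#   >>> def clamp(value, self_, field_):
+#   ...     return min(value, self_.limit)
+#   >>> @define
+#   ... class C:
+#   ...     limit: int
+#   ...     x: int = field(
+#   ...         default=0,
+#   ...         converter=Converter(clamp, takes_self=True, takes_field=True),
+#   ...     )
+#   >>> C(limit=10, x=99).x
+#   10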
+
+def make_class(
+    name, attrs, bases=(object,), class_body=None, **attributes_arguments
+):
+    r"""
+    A quick way to create a new class called *name* with *attrs*.
+
+    .. note::
+
+        ``make_class()`` is a thin wrapper around `attr.s`, not `attrs.define`
+        which means that it doesn't come with some of the improved defaults.
+
+        For example, if you want the same ``on_setattr`` behavior as in
+        `attrs.define`, you have to pass the hooks yourself: ``make_class(...,
+        on_setattr=setters.pipe(setters.convert, setters.validate))``
+
+    .. warning::
+
+        It is *your* duty to ensure that the class name and the attribute names
+        are valid identifiers. ``make_class()`` will *not* validate them for
+        you.
+
+    Args:
+        name (str): The name for the new class.
+
+        attrs (list | dict):
+            A list of names, or a dictionary mapping names to `attr.ib`\ s /
+            `attrs.field`\ s.
+
+            The order is deduced from the order of the names or attributes
+            inside *attrs*.  Otherwise the order of the definition of the
+            attributes is used.
+
+        bases (tuple[type, ...]): Classes that the new class will subclass.
+
+        class_body (dict):
+            An optional dictionary of class attributes for the new class.
+
+        attributes_arguments: Passed unmodified to `attr.s`.
+
+    Returns:
+        type: A new class with *attrs*.
+
+    .. versionadded:: 17.1.0 *bases*
+    .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
+    .. versionchanged:: 23.2.0 *class_body*
+    .. versionchanged:: 25.2.0 Class names can now be unicode.
+    """
+    # Class identifiers are converted into the normal form NFKC while parsing
+    name = unicodedata.normalize("NFKC", name)
+
+    if isinstance(attrs, dict):
+        cls_dict = attrs
+    elif isinstance(attrs, (list, tuple)):
+        cls_dict = {a: attrib() for a in attrs}
+    else:
+        msg = "attrs argument must be a dict or a list."
+        raise TypeError(msg)
+
+    pre_init = cls_dict.pop("__attrs_pre_init__", None)
+    post_init = cls_dict.pop("__attrs_post_init__", None)
+    user_init = cls_dict.pop("__init__", None)
+
+    body = {}
+    if class_body is not None:
+        body.update(class_body)
+    if pre_init is not None:
+        body["__attrs_pre_init__"] = pre_init
+    if post_init is not None:
+        body["__attrs_post_init__"] = post_init
+    if user_init is not None:
+        body["__init__"] = user_init
+
+    type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))
+
+    # For pickling to work, the __module__ variable needs to be set to the
+    # frame where the class is created.  Bypass this step in environments where
+    # sys._getframe is not defined (Jython for example) or sys._getframe is not
+    # defined for arguments greater than 0 (IronPython).
+    with contextlib.suppress(AttributeError, ValueError):
+        type_.__module__ = sys._getframe(1).f_globals.get(
+            "__name__", "__main__"
+        )
+
+    # We do it here for proper warnings with meaningful stacklevel.
+    cmp = attributes_arguments.pop("cmp", None)
+    (
+        attributes_arguments["eq"],
+        attributes_arguments["order"],
+    ) = _determine_attrs_eq_order(
+        cmp,
+        attributes_arguments.get("eq"),
+        attributes_arguments.get("order"),
+        True,
+    )
+
+    cls = _attrs(these=cls_dict, **attributes_arguments)(type_)
+    # Only add type annotations now or "_attrs()" will complain:
+    cls.__annotations__ = {
+        k: v.type for k, v in cls_dict.items() if v.type is not None
+    }
+    return cls
+
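+# Rough usage sketch (assumes the public ``attr`` API):
+#
+#   >>> import attr
+#   >>> C = attr.make_class("C", ["x", "y"])
+#   >>> C(1, 2)
+#   C(x=1, y=2)
+#   >>> D = attr.make_class("D", {"x": attr.ib(default=0)}, frozen=True)
+#   >>> D()
+#   D(x=0)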
+
+# These are required within this module, so we define them here and merely
+# import them into .validators / .converters.
+
+
+@attrs(slots=True, unsafe_hash=True)
+class _AndValidator:
+    """
+    Compose many validators to a single one.
+    """
+
+    _validators = attrib()
+
+    def __call__(self, inst, attr, value):
+        for v in self._validators:
+            v(inst, attr, value)
+
+
+def and_(*validators):
+    """
+    A validator that composes multiple validators into one.
+
+    When called on a value, it runs all wrapped validators.
+
+    Args:
+        validators (~collections.abc.Iterable[typing.Callable]):
+            Arbitrary number of validators.
+
+    .. versionadded:: 17.1.0
+    """
+    vals = []
+    for validator in validators:
+        vals.extend(
+            validator._validators
+            if isinstance(validator, _AndValidator)
+            else [validator]
+        )
+
+    return _AndValidator(tuple(vals))
+
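+# Rough usage sketch (assumes the public ``attr`` API): a list of validators
+# passed to ``attr.ib`` is combined the same way, so the two fields below
+# validate identically.
+#
+#   >>> import attr
+#   >>> from attr.validators import and_, instance_of
+#   >>> def positive(inst, attribute, value):
+#   ...     if value <= 0:
+#   ...         raise ValueError("must be positive")
+#   >>> @attr.s
+#   ... class C:
+#   ...     x = attr.ib(validator=and_(instance_of(int), positive))
+#   ...     y = attr.ib(validator=[instance_of(int), positive])
+#   >>> C(1, 2)
+#   C(x=1, y=2)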
+
+def pipe(*converters):
+    """
+    A converter that composes multiple converters into one.
+
+    When called on a value, it runs all wrapped converters, returning the
+    *last* value.
+
+    Type annotations will be inferred from the wrapped converters'
+    annotations, if they have any.
+
+    Args:
+        converters (~collections.abc.Iterable[typing.Callable]):
+            Arbitrary number of converters.
+
+    .. versionadded:: 20.1.0
+    """
+
+    return_instance = any(isinstance(c, Converter) for c in converters)
+
+    if return_instance:
+
+        def pipe_converter(val, inst, field):
+            for c in converters:
+                val = (
+                    c(val, inst, field) if isinstance(c, Converter) else c(val)
+                )
+
+            return val
+
+    else:
+
+        def pipe_converter(val):
+            for c in converters:
+                val = c(val)
+
+            return val
+
+    if not converters:
+        # If the converter list is empty, pipe_converter is the identity.
+        A = TypeVar("A")
+        pipe_converter.__annotations__.update({"val": A, "return": A})
+    else:
+        # Get parameter type from first converter.
+        t = _AnnotationExtractor(converters[0]).get_first_param_type()
+        if t:
+            pipe_converter.__annotations__["val"] = t
+
+        last = converters[-1]
+        if not PY_3_11_PLUS and isinstance(last, Converter):
+            last = last.__call__
+
+        # Get return type from last converter.
+        rt = _AnnotationExtractor(last).get_return_type()
+        if rt:
+            pipe_converter.__annotations__["return"] = rt
+
+    if return_instance:
+        return Converter(pipe_converter, takes_self=True, takes_field=True)
+    return pipe_converter
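+
+
+# Rough usage sketch (assumes the public ``attr`` API): converters compose
+# left to right, so the output of one feeds the next.
+#
+#   >>> import attr
+#   >>> to_int = attr.converters.pipe(str.strip, int)
+#   >>> to_int("  42 ")
+#   42
+#   >>> @attr.s
+#   ... class C:
+#   ...     x = attr.ib(converter=to_int)
+#   >>> C("  7 ").x
+#   7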
diff --git a/.venv/lib/python3.12/site-packages/attr/_next_gen.py b/.venv/lib/python3.12/site-packages/attr/_next_gen.py
new file mode 100644
index 00000000..9290664b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_next_gen.py
@@ -0,0 +1,623 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are keyword-only APIs that call `attr.s` and `attr.ib` with different
+default values.
+"""
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+    _DEFAULT_ON_SETATTR,
+    NOTHING,
+    _frozen_setattrs,
+    attrib,
+    attrs,
+)
+from .exceptions import UnannotatedAttributeError
+
+
+def define(
+    maybe_cls=None,
+    *,
+    these=None,
+    repr=None,
+    unsafe_hash=None,
+    hash=None,
+    init=None,
+    slots=True,
+    frozen=False,
+    weakref_slot=True,
+    str=False,
+    auto_attribs=None,
+    kw_only=False,
+    cache_hash=False,
+    auto_exc=True,
+    eq=None,
+    order=False,
+    auto_detect=True,
+    getstate_setstate=None,
+    on_setattr=None,
+    field_transformer=None,
+    match_args=True,
+):
+    r"""
+    A class decorator that adds :term:`dunder methods` according to
+    :term:`fields <field>` specified using :doc:`type annotations <types>`,
+    `field()` calls, or the *these* argument.
+
+    Since *attrs* patches or replaces an existing class, you cannot use
+    `object.__init_subclass__` with *attrs* classes, because it runs too early.
+    As a replacement, you can define ``__attrs_init_subclass__`` on your class.
+    It will be called by *attrs* classes that subclass it after they're
+    created. See also :ref:`init-subclass`.
+
+    Args:
+        slots (bool):
+            Create a :term:`slotted class <slotted classes>` that's more
+            memory-efficient. Slotted classes are generally superior to the
+            default dict classes, but have some gotchas you should know about,
+            so we encourage you to read the :term:`glossary entry <slotted
+            classes>`.
+
+        auto_detect (bool):
+            Instead of setting the *init*, *repr*, *eq*, and *hash* arguments
+            explicitly, assume they are set to True **unless any** of the
+            involved methods for one of the arguments is implemented in the
+            *current* class (meaning, it is *not* inherited from some base
+            class).
+
+            So, for example by implementing ``__eq__`` on a class yourself,
+            *attrs* will deduce ``eq=False`` and will create *neither*
+            ``__eq__`` *nor* ``__ne__`` (but Python classes come with a
+            sensible ``__ne__`` by default, so it *should* be enough to only
+            implement ``__eq__`` in most cases).
+
+            Passing True or False to *init*, *repr*, *eq*, or *hash*
+            overrides whatever *auto_detect* would determine.
+
+        auto_exc (bool):
+            If the class subclasses `BaseException` (which implicitly includes
+            any subclass of any exception), the following happens to behave
+            like a well-behaved Python exception class:
+
+            - the values for *eq*, *order*, and *hash* are ignored and the
+              instances compare and hash by the instance's ids [#]_ ,
+            - all attributes that are either passed into ``__init__`` or have a
+              default value are additionally available as a tuple in the
+              ``args`` attribute,
+            - the value of *str* is ignored leaving ``__str__`` to base
+              classes.
+
+            .. [#]
+               Note that *attrs* will *not* remove existing implementations of
+               ``__hash__`` or the equality methods. It just won't add own
+               ones.
+
+        on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
+            A callable that is run whenever the user attempts to set an
+            attribute (either by assignment like ``i.x = 42`` or by using
+            `setattr` like ``setattr(i, "x", 42)``). It receives the same
+            arguments as validators: the instance, the attribute that is being
+            modified, and the new value.
+
+            If no exception is raised, the attribute is set to the return value
+            of the callable.
+
+            If a list of callables is passed, they're automatically wrapped in
+            an `attrs.setters.pipe`.
+
+            If left None, the default behavior is to run converters and
+            validators whenever an attribute is set.
+
+        init (bool):
+            Create a ``__init__`` method that initializes the *attrs*
+            attributes. Leading underscores are stripped for the argument name,
+            unless an alias is set on the attribute.
+
+            .. seealso::
+                `init` shows advanced ways to customize the generated
+                ``__init__`` method, including executing code before and after.
+
+        repr (bool):
+            Create a ``__repr__`` method with a human readable representation
+            of *attrs* attributes.
+
+        str (bool):
+            Create a ``__str__`` method that is identical to ``__repr__``. This
+            is usually not necessary except for `Exception`\ s.
+
+        eq (bool | None):
+            If True or None (default), add ``__eq__`` and ``__ne__`` methods
+            that check two instances for equality.
+
+            .. seealso::
+                `comparison` describes how to customize the comparison behavior
+                going as far as comparing NumPy arrays.
+
+        order (bool | None):
+            If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
+            methods that behave like *eq* above and allow instances to be
+            ordered.
+
+            They compare the instances as if they were tuples of their *attrs*
+            attributes if and only if the types of both classes are
+            *identical*.
+
+            If `None`, mirror the value of *eq*.
+
+            .. seealso:: `comparison`
+
+        unsafe_hash (bool | None):
+            If None (default), the ``__hash__`` method is generated according
+            to how *eq* and *frozen* are set.
+
+            1. If *both* are True, *attrs* will generate a ``__hash__`` for
+               you.
+            2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
+               to None, marking it unhashable (which it is).
+            3. If *eq* is False, ``__hash__`` will be left untouched meaning
+               the ``__hash__`` method of the base class will be used. If the
+               base class is `object`, this means it will fall back to id-based
+               hashing.
+
+            Although not recommended, you can decide for yourself and force
+            *attrs* to create one (for example, if the class is immutable even
+            though you didn't freeze it programmatically) by passing True, or
+            force *attrs* to not create one by passing False.  Both of these
+            cases are rather special and should be used carefully.
+
+            .. seealso::
+
+                - Our documentation on `hashing`,
+                - Python's documentation on `object.__hash__`,
+                - and the `GitHub issue that led to the default behavior
+                  <https://github.com/python-attrs/attrs/issues/136>`_ for more
+                  details.
+
+        hash (bool | None):
+            Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.
+
+        cache_hash (bool):
+            Ensure that the object's hash code is computed only once and stored
+            on the object.  If this is set to True, hashing must be either
+            explicitly or implicitly enabled for this class.  If the hash code
+            is cached, avoid any reassignments of fields involved in hash code
+            computation or mutations of the objects those fields point to after
+            object creation.  If such changes occur, the behavior of the
+            object's hash code is undefined.
+
+        frozen (bool):
+            Make instances immutable after initialization.  If someone attempts
+            to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
+            is raised.
+
+            .. note::
+
+                1. This is achieved by installing a custom ``__setattr__``
+                   method on your class, so you can't implement your own.
+
+                2. True immutability is impossible in Python.
+
+                3. This *does* have a minor runtime performance `impact
+                   <how-frozen>` when initializing new instances.  In other
+                   words: ``__init__`` is slightly slower with ``frozen=True``.
+
+                4. If a class is frozen, you cannot modify ``self`` in
+                   ``__attrs_post_init__`` or a self-written ``__init__``. You
+                   can circumvent that limitation by using
+                   ``object.__setattr__(self, "attribute_name", value)``.
+
+                5. Subclasses of a frozen class are frozen too.
+
+        kw_only (bool):
+            Make all attributes keyword-only in the generated ``__init__`` (if
+            *init* is False, this parameter is ignored).
+
+        weakref_slot (bool):
+            Make instances weak-referenceable.  This has no effect unless
+            *slots* is True.
+
+        field_transformer (~typing.Callable | None):
+            A function that is called with the original class object and all
+            fields right before *attrs* finalizes the class.  You can use this,
+            for example, to automatically add converters or validators to
+            fields based on their types.
+
+            .. seealso:: `transform-fields`
+
+        match_args (bool):
+            If True (default), set ``__match_args__`` on the class to support
+            :pep:`634` (*Structural Pattern Matching*). It is a tuple of all
+            non-keyword-only ``__init__`` parameter names on Python 3.10 and
+            later. Ignored on older Python versions.
+
+        collect_by_mro (bool):
+            If True, *attrs* collects attributes from base classes correctly
+            according to the `method resolution order
+            <https://docs.python.org/3/howto/mro.html>`_. If False, *attrs*
+            will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.
+
+            See also `issue #428
+            <https://github.com/python-attrs/attrs/issues/428>`_.
+
+        getstate_setstate (bool | None):
+            .. note::
+
+                This is usually only interesting for slotted classes and you
+                should probably just set *auto_detect* to True.
+
+            If True, ``__getstate__`` and ``__setstate__`` are generated and
+            attached to the class. This is necessary for slotted classes to be
+            pickleable. If left None, it's True by default for slotted classes
+            and False for dict classes.
+
+            If *auto_detect* is True, and *getstate_setstate* is left None, and
+            **either** ``__getstate__`` or ``__setstate__`` is detected
+            directly on the class (meaning: not inherited), it is set to False
+            (this is usually what you want).
+
+        auto_attribs (bool | None):
+            If True, look at type annotations to determine which attributes to
+            use, like `dataclasses`. If False, it will only look for explicit
+            :func:`field` class attributes, like classic *attrs*.
+
+            If left None, it will guess:
+
+            1. If any attributes are annotated and no unannotated
+               `attrs.field`\ s are found, it assumes *auto_attribs=True*.
+            2. Otherwise it assumes *auto_attribs=False* and tries to collect
+               `attrs.field`\ s.
+
+            If *attrs* decides to look at type annotations, **all** fields
+            **must** be annotated. If *attrs* encounters a field that is set to
+            a :func:`field` / `attr.ib` but lacks a type annotation, an
+            `attrs.exceptions.UnannotatedAttributeError` is raised.  Use
+            ``field_name: typing.Any = field(...)`` if you don't want to set a
+            type.
+
+            .. warning::
+
+                For features that use the attribute name to create decorators
+                (for example, :ref:`validators <validators>`), you still *must*
+                assign :func:`field` / `attr.ib` to them. Otherwise Python will
+                either not find the name or try to use the default value to
+                call, for example, ``validator`` on it.
+
+            Attributes annotated as `typing.ClassVar`, and attributes that are
+            neither annotated nor set to a `field()` are **ignored**.
+
+        these (dict[str, object]):
+            A dictionary of name to the (private) return value of `field()`
+            mappings. This is useful to avoid the definition of your attributes
+            within the class body because you can't (for example, if you want
+            to add ``__repr__`` methods to Django models) or don't want to.
+
+            If *these* is not `None`, *attrs* will *not* search the class body
+            for attributes and will *not* remove any attributes from it.
+
+            The order is deduced from the order of the attributes inside
+            *these*.
+
+            Arguably, this is a rather obscure feature.
+
+    .. versionadded:: 20.1.0
+    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
+    .. versionadded:: 22.2.0
+       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
+    .. versionchanged:: 24.1.0
+       Instances are not compared as tuples of attributes anymore, but using a
+       big ``and`` condition. This is faster and has more correct behavior for
+       uncomparable values like `math.nan`.
+    .. versionadded:: 24.1.0
+       If a class has an *inherited* classmethod called
+       ``__attrs_init_subclass__``, it is executed after the class is created.
+    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
+    .. versionadded:: 24.3.0
+       Unless already present, a ``__replace__`` method is automatically
+       created for `copy.replace` (Python 3.13+ only).
+
+    .. note::
+
+        The main differences to the classic `attr.s` are:
+
+        - Automatically detect whether or not *auto_attribs* should be `True`
+          (c.f. *auto_attribs* parameter).
+        - Converters and validators run when attributes are set by default --
+          if *frozen* is `False`.
+        - *slots=True*
+
+          Usually, this has only upsides and few visible effects in everyday
+          programming. But it *can* lead to some surprising behaviors, so
+          please make sure to read :term:`slotted classes`.
+
+        - *auto_exc=True*
+        - *auto_detect=True*
+        - *order=False*
+        - Some options that were only relevant on Python 2 or were kept around
+          for backwards-compatibility have been removed.
+
+    """
+
+    def do_it(cls, auto_attribs):
+        return attrs(
+            maybe_cls=cls,
+            these=these,
+            repr=repr,
+            hash=hash,
+            unsafe_hash=unsafe_hash,
+            init=init,
+            slots=slots,
+            frozen=frozen,
+            weakref_slot=weakref_slot,
+            str=str,
+            auto_attribs=auto_attribs,
+            kw_only=kw_only,
+            cache_hash=cache_hash,
+            auto_exc=auto_exc,
+            eq=eq,
+            order=order,
+            auto_detect=auto_detect,
+            collect_by_mro=True,
+            getstate_setstate=getstate_setstate,
+            on_setattr=on_setattr,
+            field_transformer=field_transformer,
+            match_args=match_args,
+        )
+
+    def wrap(cls):
+        """
+        Making this a wrapper ensures this code runs during class creation.
+
+        We also ensure that frozen-ness of classes is inherited.
+        """
+        nonlocal frozen, on_setattr
+
+        had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+        # By default, mutable classes convert & validate on setattr.
+        if frozen is False and on_setattr is None:
+            on_setattr = _DEFAULT_ON_SETATTR
+
+        # However, if we subclass a frozen class, we inherit the immutability
+        # and disable on_setattr.
+        for base_cls in cls.__bases__:
+            if base_cls.__setattr__ is _frozen_setattrs:
+                if had_on_setattr:
+                    msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
+                    raise ValueError(msg)
+
+                on_setattr = setters.NO_OP
+                break
+
+        if auto_attribs is not None:
+            return do_it(cls, auto_attribs)
+
+        try:
+            return do_it(cls, True)
+        except UnannotatedAttributeError:
+            return do_it(cls, False)
+
+    # maybe_cls's type depends on the usage of the decorator.  It's a class
+    # if it's used as `@attrs` but `None` if used as `@attrs()`.
+    if maybe_cls is None:
+        return wrap
+
+    return wrap(maybe_cls)
+
+
+mutable = define
+frozen = partial(define, frozen=True, on_setattr=None)
+
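+# Rough usage sketch (assumes the public ``attrs`` API):
+#
+#   >>> from attrs import define, frozen
+#   >>> @define
+#   ... class Point:
+#   ...     x: int
+#   ...     y: int = 0
+#   >>> Point(1)
+#   Point(x=1, y=0)
+#   >>> @frozen
+#   ... class Pinned:
+#   ...     name: str
+#   >>> Pinned("a").name = "b"  # raises FrozenInstanceError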
+
+def field(
+    *,
+    default=NOTHING,
+    validator=None,
+    repr=True,
+    hash=None,
+    init=True,
+    metadata=None,
+    type=None,
+    converter=None,
+    factory=None,
+    kw_only=False,
+    eq=None,
+    order=None,
+    on_setattr=None,
+    alias=None,
+):
+    """
+    Create a new :term:`field` / :term:`attribute` on a class.
+
+    ..  warning::
+
+        Does **nothing** unless the class is also decorated with
+        `attrs.define` (or similar)!
+
+    Args:
+        default:
+            A value that is used if an *attrs*-generated ``__init__`` is used
+            and no value is passed while instantiating or the attribute is
+            excluded using ``init=False``.
+
+            If the value is an instance of `attrs.Factory`, its callable will
+            be used to construct a new value (useful for mutable data types
+            like lists or dicts).
+
+            If a default is not set (or set manually to `attrs.NOTHING`), a
+            value *must* be supplied when instantiating; otherwise a
+            `TypeError` will be raised.
+
+            .. seealso:: `defaults`
+
+        factory (~typing.Callable):
+            Syntactic sugar for ``default=attr.Factory(factory)``.
+
+        validator (~typing.Callable | list[~typing.Callable]):
+            Callable that is called by *attrs*-generated ``__init__`` methods
+            after the instance has been initialized.  They receive the
+            initialized instance, the :func:`~attrs.Attribute`, and the passed
+            value.
+
+            The return value is *not* inspected so the validator has to throw
+            an exception itself.
+
+            If a `list` is passed, its items are treated as validators and must
+            all pass.
+
+            Validators can be globally disabled and re-enabled using
+            `attrs.validators.get_disabled` / `attrs.validators.set_disabled`.
+
+            The validator can also be set using decorator notation as shown
+            below.
+
+            .. seealso:: :ref:`validators`
+
+        repr (bool | ~typing.Callable):
+            Include this attribute in the generated ``__repr__`` method. If
+            True, include the attribute; if False, omit it. By default, the
+            built-in ``repr()`` function is used. To override how the attribute
+            value is formatted, pass a ``callable`` that takes a single value
+            and returns a string. Note that the resulting string is used as-is,
+            which means it will be used directly *instead* of calling
+            ``repr()`` (the default).
+
+        eq (bool | ~typing.Callable):
+            If True (default), include this attribute in the generated
+            ``__eq__`` and ``__ne__`` methods that check two instances for
+            equality. To override how the attribute value is compared, pass a
+            callable that takes a single value and returns the value to be
+            compared.
+
+            .. seealso:: `comparison`
+
+        order (bool | ~typing.Callable):
+            If True (default), include this attribute in the generated
+            ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
+            override how the attribute value is ordered, pass a callable that
+            takes a single value and returns the value to be ordered.
+
+            .. seealso:: `comparison`
+
+        hash (bool | None):
+            Include this attribute in the generated ``__hash__`` method.  If
+            None (default), mirror *eq*'s value.  This is the correct behavior
+            according to the Python spec.  Setting this value to anything else
+            than None is *discouraged*.
+
+            .. seealso:: `hashing`
+
+        init (bool):
+            Include this attribute in the generated ``__init__`` method.
+
+            It is possible to set this to False and set a default value. In
+            that case this attribute is unconditionally initialized with the
+            specified default value or factory.
+
+            .. seealso:: `init`
+
+        converter (typing.Callable | Converter):
+            A callable that is called by *attrs*-generated ``__init__`` methods
+            to convert attribute's value to the desired format.
+
+            If a vanilla callable is passed, it is given the passed-in value as
+            the only positional argument. It is possible to receive additional
+            arguments by wrapping the callable in a `Converter`.
+
+            Either way, the returned value will be used as the new value of the
+            attribute.  The value is converted before being passed to the
+            validator, if any.
+
+            .. seealso:: :ref:`converters`
+
+        metadata (dict | None):
+            An arbitrary mapping, to be used by third-party code.
+
+            .. seealso:: `extending-metadata`.
+
+        type (type):
+            The type of the attribute. Nowadays, the preferred method to
+            specify the type is using a variable annotation (see :pep:`526`).
+            This argument is provided for backwards-compatibility and for usage
+            with `make_class`. Regardless of the approach used, the type will
+            be stored on ``Attribute.type``.
+
+            Please note that *attrs* doesn't do anything with this metadata by
+            itself. You can use it as part of your own code or for `static type
+            checking <types>`.
+
+        kw_only (bool):
+            Make this attribute keyword-only in the generated ``__init__`` (if
+            ``init`` is False, this parameter is ignored).
+
+        on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
+            Allows to overwrite the *on_setattr* setting from `attr.s`. If left
+            None, the *on_setattr* value from `attr.s` is used. Set to
+            `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+            attribute -- regardless of the setting in `define()`.
+
+        alias (str | None):
+            Override this attribute's parameter name in the generated
+            ``__init__`` method. If left None, it defaults to ``name`` stripped
+            of leading underscores. See `private-attributes`.
+
+    .. versionadded:: 20.1.0
+    .. versionchanged:: 21.1.0
+       *eq*, *order*, and *cmp* also accept a custom callable
+    .. versionadded:: 22.2.0 *alias*
+    .. versionadded:: 23.1.0
+       The *type* parameter has been re-added; mostly for `attrs.make_class`.
+       Please note that type checkers ignore this metadata.
+
+    .. seealso::
+
+       `attr.ib`
+    """
+    return attrib(
+        default=default,
+        validator=validator,
+        repr=repr,
+        hash=hash,
+        init=init,
+        metadata=metadata,
+        type=type,
+        converter=converter,
+        factory=factory,
+        kw_only=kw_only,
+        eq=eq,
+        order=order,
+        on_setattr=on_setattr,
+        alias=alias,
+    )
+
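+# Rough usage sketch (assumes the public ``attrs`` API): note how the leading
+# underscore is stripped from the ``__init__`` parameter name unless an
+# explicit *alias* is given.
+#
+#   >>> from attrs import define, field
+#   >>> @define
+#   ... class C:
+#   ...     _token: str = field()
+#   ...     items: list = field(factory=list)
+#   ...     scale: float = field(default=1.0, alias="zoom")
+#   >>> C(token="abc", zoom=2.0).items
+#   []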
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+    """
+    Same as `attr.asdict`, except that collection types are always retained
+    and dict is always used as *dict_factory*.
+
+    .. versionadded:: 21.3.0
+    """
+    return _asdict(
+        inst=inst,
+        recurse=recurse,
+        filter=filter,
+        value_serializer=value_serializer,
+        retain_collection_types=True,
+    )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+    """
+    Same as `attr.astuple`, except that collection types are always retained
+    and `tuple` is always used as the *tuple_factory*.
+
+    .. versionadded:: 21.3.0
+    """
+    return _astuple(
+        inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+    )
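+
+
+# Rough usage sketch (assumes the public ``attrs`` API): unlike ``attr.asdict``
+# and ``attr.astuple``, the variants above keep the original collection types.
+#
+#   >>> from attrs import asdict, astuple, define
+#   >>> @define
+#   ... class C:
+#   ...     xs: tuple = (1, 2)
+#   >>> asdict(C())
+#   {'xs': (1, 2)}
+#   >>> astuple(C())
+#   ((1, 2),)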
diff --git a/.venv/lib/python3.12/site-packages/attr/_typing_compat.pyi b/.venv/lib/python3.12/site-packages/attr/_typing_compat.pyi
new file mode 100644
index 00000000..ca7b71e9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_typing_compat.pyi
@@ -0,0 +1,15 @@
+from typing import Any, ClassVar, Protocol
+
+# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
+MYPY = False
+
+if MYPY:
+    # A protocol to be able to statically accept an attrs class.
+    class AttrsInstance_(Protocol):
+        __attrs_attrs__: ClassVar[Any]
+
+else:
+    # For type checkers without plug-in support use an empty protocol that
+    # will (hopefully) be combined into a union.
+    class AttrsInstance_(Protocol):
+        pass
diff --git a/.venv/lib/python3.12/site-packages/attr/_version_info.py b/.venv/lib/python3.12/site-packages/attr/_version_info.py
new file mode 100644
index 00000000..51a1312f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_version_info.py
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo:
+    """
+    A version object that can be compared to a tuple of length 1--4:
+
+    >>> attr.VersionInfo(19, 1, 0, "final")  <= (19, 2)
+    True
+    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+    True
+    >>> vi = attr.VersionInfo(19, 2, 0, "final")
+    >>> vi < (19, 1, 1)
+    False
+    >>> vi < (19,)
+    False
+    >>> vi == (19, 2,)
+    True
+    >>> vi == (19, 2, 1)
+    False
+
+    .. versionadded:: 19.2
+    """
+
+    year = attrib(type=int)
+    minor = attrib(type=int)
+    micro = attrib(type=int)
+    releaselevel = attrib(type=str)
+
+    @classmethod
+    def _from_version_string(cls, s):
+        """
+        Parse *s* and return a `VersionInfo`.
+        """
+        v = s.split(".")
+        if len(v) == 3:
+            v.append("final")
+
+        return cls(
+            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+        )
+
+    def _ensure_tuple(self, other):
+        """
+        Ensure *other* is a tuple of a valid length.
+
+        Returns a possibly transformed *other* and ourselves as a tuple of
+        the same length as *other*.
+        """
+
+        if self.__class__ is other.__class__:
+            other = astuple(other)
+
+        if not isinstance(other, tuple):
+            raise NotImplementedError
+
+        if not (1 <= len(other) <= 4):
+            raise NotImplementedError
+
+        return astuple(self)[: len(other)], other
+
+    def __eq__(self, other):
+        try:
+            us, them = self._ensure_tuple(other)
+        except NotImplementedError:
+            return NotImplemented
+
+        return us == them
+
+    def __lt__(self, other):
+        try:
+            us, them = self._ensure_tuple(other)
+        except NotImplementedError:
+            return NotImplemented
+
+        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+        # have to do anything special with releaselevel for now.
+        return us < them
diff --git a/.venv/lib/python3.12/site-packages/attr/_version_info.pyi b/.venv/lib/python3.12/site-packages/attr/_version_info.pyi
new file mode 100644
index 00000000..45ced086
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/_version_info.pyi
@@ -0,0 +1,9 @@
+class VersionInfo:
+    @property
+    def year(self) -> int: ...
+    @property
+    def minor(self) -> int: ...
+    @property
+    def micro(self) -> int: ...
+    @property
+    def releaselevel(self) -> str: ...
diff --git a/.venv/lib/python3.12/site-packages/attr/converters.py b/.venv/lib/python3.12/site-packages/attr/converters.py
new file mode 100644
index 00000000..0a79deef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/converters.py
@@ -0,0 +1,162 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Converter, Factory, pipe
+
+
+__all__ = [
+    "default_if_none",
+    "optional",
+    "pipe",
+    "to_bool",
+]
+
+
+def optional(converter):
+    """
+    A converter that allows an attribute to be optional. An optional attribute
+    is one which can be set to `None`.
+
+    Type annotations will be inferred from the wrapped converter's
+    annotations, if it has any.
+
+    Args:
+        converter (typing.Callable):
+            the converter that is used for non-`None` values.
+
+    .. versionadded:: 17.1.0
+    """
+
+    if isinstance(converter, Converter):
+
+        def optional_converter(val, inst, field):
+            if val is None:
+                return None
+            return converter(val, inst, field)
+
+    else:
+
+        def optional_converter(val):
+            if val is None:
+                return None
+            return converter(val)
+
+    xtr = _AnnotationExtractor(converter)
+
+    t = xtr.get_first_param_type()
+    if t:
+        optional_converter.__annotations__["val"] = typing.Optional[t]
+
+    rt = xtr.get_return_type()
+    if rt:
+        optional_converter.__annotations__["return"] = typing.Optional[rt]
+
+    if isinstance(converter, Converter):
+        return Converter(optional_converter, takes_self=True, takes_field=True)
+
+    return optional_converter
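+
+
+# Usage sketch (illustrative, not from the upstream source): wrapping `int`
+# yields a converter that passes `None` through untouched.
+#
+#     >>> optional(int)("42")
+#     42
+#     >>> optional(int)(None) is None
+#     True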
+
+
+def default_if_none(default=NOTHING, factory=None):
+    """
+    A converter that replaces `None` values with *default* or the result of
+    *factory*.
+
+    Args:
+        default:
+            Value to be used if `None` is passed. Passing an instance of
+            `attrs.Factory` is supported, however the ``takes_self`` option is
+            *not*.
+
+        factory (typing.Callable):
+            A callable that takes no parameters whose result is used if `None`
+            is passed.
+
+    Raises:
+        TypeError: If **neither** *default* **nor** *factory* is passed.
+
+        TypeError: If **both** *default* and *factory* are passed.
+
+        ValueError:
+            If an instance of `attrs.Factory` is passed with
+            ``takes_self=True``.
+
+    .. versionadded:: 18.2.0
+    """
+    if default is NOTHING and factory is None:
+        msg = "Must pass either `default` or `factory`."
+        raise TypeError(msg)
+
+    if default is not NOTHING and factory is not None:
+        msg = "Must pass either `default` or `factory` but not both."
+        raise TypeError(msg)
+
+    if factory is not None:
+        default = Factory(factory)
+
+    if isinstance(default, Factory):
+        if default.takes_self:
+            msg = "`takes_self` is not supported by default_if_none."
+            raise ValueError(msg)
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default.factory()
+
+    else:
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default
+
+    return default_if_none_converter
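+
+
+# Usage sketch (illustrative, not from the upstream source): `None` is
+# replaced by the default or a fresh factory result, everything else passes
+# through unchanged.
+#
+#     >>> default_if_none(42)(None)
+#     42
+#     >>> default_if_none(factory=list)(None)
+#     []
+#     >>> default_if_none(42)("spam")
+#     'spam'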
+
+
+def to_bool(val):
+    """
+    Convert "boolean" strings (for example, from environment variables) to real
+    booleans.
+
+    Values mapping to `True`:
+
+    - ``True``
+    - ``"true"`` / ``"t"``
+    - ``"yes"`` / ``"y"``
+    - ``"on"``
+    - ``"1"``
+    - ``1``
+
+    Values mapping to `False`:
+
+    - ``False``
+    - ``"false"`` / ``"f"``
+    - ``"no"`` / ``"n"``
+    - ``"off"``
+    - ``"0"``
+    - ``0``
+
+    Raises:
+        ValueError: For any other value.
+
+    .. versionadded:: 21.3.0
+    """
+    if isinstance(val, str):
+        val = val.lower()
+
+    if val in (True, "true", "t", "yes", "y", "on", "1", 1):
+        return True
+    if val in (False, "false", "f", "no", "n", "off", "0", 0):
+        return False
+
+    msg = f"Cannot convert value to bool: {val!r}"
+    raise ValueError(msg)
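+
+
+# Usage sketch (illustrative, not from the upstream source): handy for values
+# read from environment variables.
+#
+#     >>> to_bool("YES")
+#     True
+#     >>> to_bool("off")
+#     False
+#     >>> to_bool(1)
+#     True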
diff --git a/.venv/lib/python3.12/site-packages/attr/converters.pyi b/.venv/lib/python3.12/site-packages/attr/converters.pyi
new file mode 100644
index 00000000..12bd0c4f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/converters.pyi
@@ -0,0 +1,19 @@
+from typing import Callable, Any, overload
+
+from attrs import _ConverterType, _CallableConverterType
+
+@overload
+def pipe(*validators: _CallableConverterType) -> _CallableConverterType: ...
+@overload
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+@overload
+def optional(converter: _CallableConverterType) -> _CallableConverterType: ...
+@overload
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: Any) -> _CallableConverterType: ...
+@overload
+def default_if_none(
+    *, factory: Callable[[], Any]
+) -> _CallableConverterType: ...
+def to_bool(val: str | int | bool) -> bool: ...
diff --git a/.venv/lib/python3.12/site-packages/attr/exceptions.py b/.venv/lib/python3.12/site-packages/attr/exceptions.py
new file mode 100644
index 00000000..3b7abb81
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/exceptions.py
@@ -0,0 +1,95 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+from typing import ClassVar
+
+
+class FrozenError(AttributeError):
+    """
+    A frozen/immutable instance or attribute has been attempted to be
+    modified.
+
+    It mirrors the behavior of ``namedtuples`` by using the same error message
+    and subclassing `AttributeError`.
+
+    .. versionadded:: 20.1.0
+    """
+
+    msg = "can't set attribute"
+    args: ClassVar[tuple[str]] = (msg,)
+
+
+class FrozenInstanceError(FrozenError):
+    """
+    A frozen instance has been attempted to be modified.
+
+    .. versionadded:: 16.1.0
+    """
+
+
+class FrozenAttributeError(FrozenError):
+    """
+    A frozen attribute has been attempted to be modified.
+
+    .. versionadded:: 20.1.0
+    """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+    """
+    An *attrs* function couldn't find an attribute that the user asked for.
+
+    .. versionadded:: 16.2.0
+    """
+
+
+class NotAnAttrsClassError(ValueError):
+    """
+    A non-*attrs* class has been passed into an *attrs* function.
+
+    .. versionadded:: 16.2.0
+    """
+
+
+class DefaultAlreadySetError(RuntimeError):
+    """
+    A default has been set when defining the field and an attempt is made to
+    reset it using the decorator.
+
+    .. versionadded:: 17.1.0
+    """
+
+
+class UnannotatedAttributeError(RuntimeError):
+    """
+    A class with ``auto_attribs=True`` has a field without a type annotation.
+
+    .. versionadded:: 17.3.0
+    """
+
+
+class PythonTooOldError(RuntimeError):
+    """
+    An attempt was made to use an *attrs* feature that requires a newer
+    Python version.
+
+    .. versionadded:: 18.2.0
+    """
+
+
+class NotCallableError(TypeError):
+    """
+    A field requiring a callable has been set with a value that is not
+    callable.
+
+    .. versionadded:: 19.2.0
+    """
+
+    def __init__(self, msg, value):
+        super(TypeError, self).__init__(msg, value)
+        self.msg = msg
+        self.value = value
+
+    def __str__(self):
+        return str(self.msg)
diff --git a/.venv/lib/python3.12/site-packages/attr/exceptions.pyi b/.venv/lib/python3.12/site-packages/attr/exceptions.pyi
new file mode 100644
index 00000000..f2680118
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/exceptions.pyi
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+    msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+    msg: str = ...
+    value: Any = ...
+    def __init__(self, msg: str, value: Any) -> None: ...
diff --git a/.venv/lib/python3.12/site-packages/attr/filters.py b/.venv/lib/python3.12/site-packages/attr/filters.py
new file mode 100644
index 00000000..689b1705
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/filters.py
@@ -0,0 +1,72 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
+"""
+
+from ._make import Attribute
+
+
+def _split_what(what):
+    """
+    Returns a tuple of `frozenset`s of classes, names, and attributes.
+    """
+    return (
+        frozenset(cls for cls in what if isinstance(cls, type)),
+        frozenset(cls for cls in what if isinstance(cls, str)),
+        frozenset(cls for cls in what if isinstance(cls, Attribute)),
+    )
+
+
+def include(*what):
+    """
+    Create a filter that only allows *what*.
+
+    Args:
+        what (list[type, str, attrs.Attribute]):
+            What to include. Can be a type, a name, or an attribute.
+
+    Returns:
+        Callable:
+            A callable that can be passed to `attrs.asdict`'s and
+            `attrs.astuple`'s *filter* argument.
+
+    .. versionchanged:: 23.1.0 Accept strings with field names.
+    """
+    cls, names, attrs = _split_what(what)
+
+    def include_(attribute, value):
+        return (
+            value.__class__ in cls
+            or attribute.name in names
+            or attribute in attrs
+        )
+
+    return include_
+
+
+def exclude(*what):
+    """
+    Create a filter that does **not** allow *what*.
+
+    Args:
+        what (list[type, str, attrs.Attribute]):
+            What to exclude. Can be a type, a name, or an attribute.
+
+    Returns:
+        Callable:
+            A callable that can be passed to `attrs.asdict`'s and
+            `attrs.astuple`'s *filter* argument.
+
+    .. versionchanged:: 23.3.0 Accept strings with field names.
+    """
+    cls, names, attrs = _split_what(what)
+
+    def exclude_(attribute, value):
+        return not (
+            value.__class__ in cls
+            or attribute.name in names
+            or attribute in attrs
+        )
+
+    return exclude_
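+
+
+# Usage sketch (illustrative, not from the upstream source; the class and
+# field names are invented): both factories build callables for the *filter*
+# argument of `attr.asdict` / `attr.astuple`.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class User:
+#     ...     name = attr.ib()
+#     ...     password = attr.ib()
+#     >>> attr.asdict(User("jane", "hunter2"), filter=exclude("password"))
+#     {'name': 'jane'}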
diff --git a/.venv/lib/python3.12/site-packages/attr/filters.pyi b/.venv/lib/python3.12/site-packages/attr/filters.pyi
new file mode 100644
index 00000000..974abdcd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/filters.pyi
@@ -0,0 +1,6 @@
+from typing import Any
+
+from . import Attribute, _FilterType
+
+def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
+def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
diff --git a/.venv/lib/python3.12/site-packages/attr/py.typed b/.venv/lib/python3.12/site-packages/attr/py.typed
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/py.typed
diff --git a/.venv/lib/python3.12/site-packages/attr/setters.py b/.venv/lib/python3.12/site-packages/attr/setters.py
new file mode 100644
index 00000000..78b08398
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/setters.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+    """
+    Run all *setters* and return the return value of the last one.
+
+    .. versionadded:: 20.1.0
+    """
+
+    def wrapped_pipe(instance, attrib, new_value):
+        rv = new_value
+
+        for setter in setters:
+            rv = setter(instance, attrib, rv)
+
+        return rv
+
+    return wrapped_pipe
+
+
+def frozen(_, __, ___):
+    """
+    Prevent an attribute from being modified.
+
+    .. versionadded:: 20.1.0
+    """
+    raise FrozenAttributeError
+
+
+def validate(instance, attrib, new_value):
+    """
+    Run *attrib*'s validator on *new_value* if it has one.
+
+    .. versionadded:: 20.1.0
+    """
+    if _config._run_validators is False:
+        return new_value
+
+    v = attrib.validator
+    if not v:
+        return new_value
+
+    v(instance, attrib, new_value)
+
+    return new_value
+
+
+def convert(instance, attrib, new_value):
+    """
+    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+    result.
+
+    .. versionadded:: 20.1.0
+    """
+    c = attrib.converter
+    if c:
+        # This can be removed once we drop 3.8 and use attrs.Converter instead.
+        from ._make import Converter
+
+        if not isinstance(c, Converter):
+            return c(new_value)
+
+        return c(new_value, instance, attrib)
+
+    return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# Sphinx's autodata stopped working, so the docstring is inlined in the API
+# docs.
+NO_OP = object()
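+
+
+# Usage sketch (illustrative, not from the upstream source; the class and
+# field names are invented): hooks are passed via *on_setattr* and can be
+# chained with `pipe`.
+#
+#     >>> import attr
+#     >>> from attr import setters, validators
+#     >>> @attr.s(on_setattr=setters.pipe(setters.convert, setters.validate))
+#     ... class Point:
+#     ...     x = attr.ib(converter=int, validator=validators.ge(0))
+#     >>> p = Point("3")
+#     >>> p.x = "5"  # converted, then validated, on assignment
+#     >>> p.x
+#     5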
diff --git a/.venv/lib/python3.12/site-packages/attr/setters.pyi b/.venv/lib/python3.12/site-packages/attr/setters.pyi
new file mode 100644
index 00000000..73abf36e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/setters.pyi
@@ -0,0 +1,20 @@
+from typing import Any, NewType, NoReturn, TypeVar
+
+from . import Attribute
+from attrs import _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+    instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because they can be chained using pipe.
+def convert(
+    instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
diff --git a/.venv/lib/python3.12/site-packages/attr/validators.py b/.venv/lib/python3.12/site-packages/attr/validators.py
new file mode 100644
index 00000000..e7b75525
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/validators.py
@@ -0,0 +1,710 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
+"""
+
+import operator
+import re
+
+from contextlib import contextmanager
+from re import Pattern
+
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .converters import default_if_none
+from .exceptions import NotCallableError
+
+
+__all__ = [
+    "and_",
+    "deep_iterable",
+    "deep_mapping",
+    "disabled",
+    "ge",
+    "get_disabled",
+    "gt",
+    "in_",
+    "instance_of",
+    "is_callable",
+    "le",
+    "lt",
+    "matches_re",
+    "max_len",
+    "min_len",
+    "not_",
+    "optional",
+    "or_",
+    "set_disabled",
+]
+
+
+def set_disabled(disabled):
+    """
+    Globally disable or enable running validators.
+
+    By default, they are run.
+
+    Args:
+        disabled (bool): If `True`, disable running all validators.
+
+    .. warning::
+
+        This function is not thread-safe!
+
+    .. versionadded:: 21.3.0
+    """
+    set_run_validators(not disabled)
+
+
+def get_disabled():
+    """
+    Return a bool indicating whether validators are currently disabled or not.
+
+    Returns:
+        bool: `True` if validators are currently disabled.
+
+    .. versionadded:: 21.3.0
+    """
+    return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+    """
+    Context manager that disables running validators within its context.
+
+    .. warning::
+
+        This context manager is not thread-safe!
+
+    .. versionadded:: 21.3.0
+    """
+    set_run_validators(False)
+    try:
+        yield
+    finally:
+        set_run_validators(True)
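+
+
+# Usage sketch (illustrative, not from the upstream source): within the
+# context, the module-level switch reports validators as disabled.
+#
+#     >>> with disabled():
+#     ...     get_disabled()
+#     True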
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _InstanceOfValidator:
+    type = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not isinstance(value, self.type):
+            msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
+            raise TypeError(
+                msg,
+                attr,
+                self.type,
+                value,
+            )
+
+    def __repr__(self):
+        return f"<instance_of validator for type {self.type!r}>"
+
+
+def instance_of(type):
+    """
+    A validator that raises a `TypeError` if the initializer is called with a
+    wrong type for this particular attribute (checks are performed using
+    `isinstance`, therefore it's also valid to pass a tuple of types).
+
+    Args:
+        type (type | tuple[type]): The type to check for.
+
+    Raises:
+        TypeError:
+            With a human readable error message, the attribute (of type
+            `attrs.Attribute`), the expected type, and the value it got.
+    """
+    return _InstanceOfValidator(type)
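+
+
+# Usage sketch (illustrative, not from the upstream source; the class name is
+# invented).  A wrong type, e.g. ``C("42")``, raises `TypeError`.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class C:
+#     ...     x = attr.ib(validator=instance_of(int))
+#     >>> C(42)
+#     C(x=42)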
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator:
+    pattern = attrib()
+    match_func = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.match_func(value):
+            msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
+            raise ValueError(
+                msg,
+                attr,
+                self.pattern,
+                value,
+            )
+
+    def __repr__(self):
+        return f"<matches_re validator for pattern {self.pattern!r}>"
+
+
+def matches_re(regex, flags=0, func=None):
+    r"""
+    A validator that raises `ValueError` if the initializer is called with a
+    string that doesn't match *regex*.
+
+    Args:
+        regex (str, re.Pattern):
+            A regex string or precompiled pattern to match against
+
+        flags (int):
+            Flags that will be passed to the underlying re function (default 0)
+
+        func (typing.Callable):
+            Which underlying `re` function to call. Valid options are
+            `re.fullmatch`, `re.search`, and `re.match`; the default `None`
+            means `re.fullmatch`. For performance reasons, the pattern is
+            always precompiled using `re.compile`.
+
+    .. versionadded:: 19.2.0
+    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+    """
+    valid_funcs = (re.fullmatch, None, re.search, re.match)
+    if func not in valid_funcs:
+        msg = "'func' must be one of {}.".format(
+            ", ".join(
+                sorted((e and e.__name__) or "None" for e in set(valid_funcs))
+            )
+        )
+        raise ValueError(msg)
+
+    if isinstance(regex, Pattern):
+        if flags:
+            msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
+            raise TypeError(msg)
+        pattern = regex
+    else:
+        pattern = re.compile(regex, flags)
+
+    if func is re.match:
+        match_func = pattern.match
+    elif func is re.search:
+        match_func = pattern.search
+    else:
+        match_func = pattern.fullmatch
+
+    return _MatchesReValidator(pattern, match_func)
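+
+
+# Usage sketch (illustrative, not from the upstream source; the class, field
+# name, and pattern are invented): by default the whole value must match.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class User:
+#     ...     email = attr.ib(validator=matches_re(r"[^@]+@[^@]+"))
+#     >>> User("jane@example.com")
+#     User(email='jane@example.com')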
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _OptionalValidator:
+    validator = attrib()
+
+    def __call__(self, inst, attr, value):
+        if value is None:
+            return
+
+        self.validator(inst, attr, value)
+
+    def __repr__(self):
+        return f"<optional validator for {self.validator!r} or None>"
+
+
+def optional(validator):
+    """
+    A validator that makes an attribute optional.  An optional attribute is one
+    which can be set to `None` in addition to satisfying the requirements of
+    the sub-validator.
+
+    Args:
+        validator
+            (typing.Callable | tuple[typing.Callable] | list[typing.Callable]):
+            A validator (or validators) that is used for non-`None` values.
+
+    .. versionadded:: 15.1.0
+    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+    .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
+    """
+    if isinstance(validator, (list, tuple)):
+        return _OptionalValidator(_AndValidator(validator))
+
+    return _OptionalValidator(validator)
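+
+
+# Usage sketch (illustrative, not from the upstream source; the class name is
+# invented): the wrapped validator is skipped for `None`.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class C:
+#     ...     x = attr.ib(default=None, validator=optional(instance_of(int)))
+#     >>> C()
+#     C(x=None)
+#     >>> C(3)
+#     C(x=3)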
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _InValidator:
+    options = attrib()
+    _original_options = attrib(hash=False)
+
+    def __call__(self, inst, attr, value):
+        try:
+            in_options = value in self.options
+        except TypeError:  # e.g. `1 in "abc"`
+            in_options = False
+
+        if not in_options:
+            msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
+            raise ValueError(
+                msg,
+                attr,
+                self._original_options,
+                value,
+            )
+
+    def __repr__(self):
+        return f"<in_ validator with options {self._original_options!r}>"
+
+
+def in_(options):
+    """
+    A validator that raises a `ValueError` if the initializer is called with a
+    value that does not belong in the *options* provided.
+
+    The check is performed using ``value in options``, so *options* has to
+    support that operation.
+
+    To keep the validator hashable, dicts, lists, and sets are transparently
+    transformed into a `tuple`.
+
+    Args:
+        options: Allowed options.
+
+    Raises:
+        ValueError:
+            With a human readable error message, the attribute (of type
+            `attrs.Attribute`), the expected options, and the value it got.
+
+    .. versionadded:: 17.1.0
+    .. versionchanged:: 22.1.0
+       The ValueError was incomplete until now and only contained the human
+       readable error message. Now it contains all the information that has
+       been promised since 17.1.0.
+    .. versionchanged:: 24.1.0
+       *options* that are a list, dict, or a set are now transformed into a
+       tuple to keep the validator hashable.
+    """
+    repr_options = options
+    if isinstance(options, (list, dict, set)):
+        options = tuple(options)
+
+    return _InValidator(options, repr_options)
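+
+
+# Usage sketch (illustrative, not from the upstream source; the class and
+# field names are invented):
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Order:
+#     ...     state = attr.ib(validator=in_(["new", "paid", "shipped"]))
+#     >>> Order("paid")
+#     Order(state='paid')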
+
+
+@attrs(repr=False, slots=False, unsafe_hash=True)
+class _IsCallableValidator:
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not callable(value):
+            message = (
+                "'{name}' must be callable "
+                "(got {value!r} that is a {actual!r})."
+            )
+            raise NotCallableError(
+                msg=message.format(
+                    name=attr.name, value=value, actual=value.__class__
+                ),
+                value=value,
+            )
+
+    def __repr__(self):
+        return "<is_callable validator>"
+
+
+def is_callable():
+    """
+    A validator that raises an `attrs.exceptions.NotCallableError` if the
+    initializer is called with a value for this particular attribute that is
+    not callable.
+
+    .. versionadded:: 19.1.0
+
+    Raises:
+        attrs.exceptions.NotCallableError:
+            With a human readable error message containing the attribute
+            (`attrs.Attribute`) name, and the value it got.
+    """
+    return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepIterable:
+    member_validator = attrib(validator=is_callable())
+    iterable_validator = attrib(
+        default=None, validator=optional(is_callable())
+    )
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.iterable_validator is not None:
+            self.iterable_validator(inst, attr, value)
+
+        for member in value:
+            self.member_validator(inst, attr, member)
+
+    def __repr__(self):
+        iterable_identifier = (
+            ""
+            if self.iterable_validator is None
+            else f" {self.iterable_validator!r}"
+        )
+        return (
+            f"<deep_iterable validator for{iterable_identifier}"
+            f" iterables of {self.member_validator!r}>"
+        )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+    """
+    A validator that performs deep validation of an iterable.
+
+    Args:
+        member_validator: Validator to apply to iterable members.
+
+        iterable_validator:
+            Validator to apply to iterable itself (optional).
+
+    Raises:
+        TypeError: If any sub-validators fail.
+
+    .. versionadded:: 19.1.0
+    """
+    if isinstance(member_validator, (list, tuple)):
+        member_validator = and_(*member_validator)
+    return _DeepIterable(member_validator, iterable_validator)
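+
+
+# Usage sketch (illustrative, not from the upstream source; the class and
+# field names are invented): the member validator runs on every element, the
+# optional iterable validator on the container itself.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Batch:
+#     ...     ids = attr.ib(
+#     ...         validator=deep_iterable(
+#     ...             member_validator=instance_of(int),
+#     ...             iterable_validator=instance_of(list),
+#     ...         )
+#     ...     )
+#     >>> Batch([1, 2, 3])
+#     Batch(ids=[1, 2, 3])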
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepMapping:
+    key_validator = attrib(validator=is_callable())
+    value_validator = attrib(validator=is_callable())
+    mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.mapping_validator is not None:
+            self.mapping_validator(inst, attr, value)
+
+        for key in value:
+            self.key_validator(inst, attr, key)
+            self.value_validator(inst, attr, value[key])
+
+    def __repr__(self):
+        return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+    """
+    A validator that performs deep validation of a dictionary.
+
+    Args:
+        key_validator: Validator to apply to dictionary keys.
+
+        value_validator: Validator to apply to dictionary values.
+
+        mapping_validator:
+            Validator to apply to top-level mapping attribute (optional).
+
+    .. versionadded:: 19.1.0
+
+    Raises:
+        TypeError: If any sub-validators fail.
+    """
+    return _DeepMapping(key_validator, value_validator, mapping_validator)
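+
+
+# Usage sketch (illustrative, not from the upstream source; the class and
+# field names are invented): keys and values are validated separately.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Env:
+#     ...     vars = attr.ib(
+#     ...         validator=deep_mapping(
+#     ...             key_validator=instance_of(str),
+#     ...             value_validator=instance_of(str),
+#     ...         )
+#     ...     )
+#     >>> Env({"HOME": "/root"})
+#     Env(vars={'HOME': '/root'})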
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator:
+    bound = attrib()
+    compare_op = attrib()
+    compare_func = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.compare_func(value, self.bound):
+            msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
+            raise ValueError(msg)
+
+    def __repr__(self):
+        return f"<Validator for x {self.compare_op} {self.bound}>"
+
+
+def lt(val):
+    """
+    A validator that raises `ValueError` if the initializer is called with a
+    number larger than or equal to *val*.
+
+    The validator uses `operator.lt` to compare the values.
+
+    Args:
+        val: Exclusive upper bound for values.
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+    """
+    A validator that raises `ValueError` if the initializer is called with a
+    number greater than *val*.
+
+    The validator uses `operator.le` to compare the values.
+
+    Args:
+        val: Inclusive upper bound for values.
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+    """
+    A validator that raises `ValueError` if the initializer is called with a
+    number smaller than *val*.
+
+    The validator uses `operator.ge` to compare the values.
+
+    Args:
+        val: Inclusive lower bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+    """
+    A validator that raises `ValueError` if the initializer is called with a
+    number smaller than or equal to *val*.
+
+    The validator uses `operator.gt` to compare the values.
+
+    Args:
+        val: Exclusive lower bound for values.
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+    max_length = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if len(value) > self.max_length:
+            msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
+            raise ValueError(msg)
+
+    def __repr__(self):
+        return f"<max_len validator for {self.max_length}>"
+
+
+def max_len(length):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a string or iterable that is longer than *length*.
+
+    Args:
+        length (int): Maximum length of the string or iterable
+
+    .. versionadded:: 21.3.0
+    """
+    return _MaxLengthValidator(length)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+    min_length = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if len(value) < self.min_length:
+            msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
+            raise ValueError(msg)
+
+    def __repr__(self):
+        return f"<min_len validator for {self.min_length}>"
+
+
+def min_len(length):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a string or iterable that is shorter than *length*.
+
+    Args:
+        length (int): Minimum length of the string or iterable
+
+    .. versionadded:: 22.1.0
+    """
+    return _MinLengthValidator(length)
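+
+
+# Usage sketch (illustrative, not from the upstream source; the class and
+# field names are invented): both length bounds can be combined in one field.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Handle:
+#     ...     name = attr.ib(validator=[min_len(3), max_len(16)])
+#     >>> Handle("ada")
+#     Handle(name='ada')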
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _SubclassOfValidator:
+    type = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not issubclass(value, self.type):
+            msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
+            raise TypeError(
+                msg,
+                attr,
+                self.type,
+                value,
+            )
+
+    def __repr__(self):
+        return f"<subclass_of validator for type {self.type!r}>"
+
+
+def _subclass_of(type):
+    """
+    A validator that raises a `TypeError` if the initializer is called with a
+    wrong type for this particular attribute (checks are performed using
+    `issubclass`, therefore it's also valid to pass a tuple of types).
+
+    Args:
+        type (type | tuple[type, ...]): The type(s) to check for.
+
+    Raises:
+        TypeError:
+            With a human readable error message, the attribute (of type
+            `attrs.Attribute`), the expected type, and the value it got.
+    """
+    return _SubclassOfValidator(type)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _NotValidator:
+    validator = attrib()
+    msg = attrib(
+        converter=default_if_none(
+            "not_ validator child '{validator!r}' "
+            "did not raise a captured error"
+        )
+    )
+    exc_types = attrib(
+        validator=deep_iterable(
+            member_validator=_subclass_of(Exception),
+            iterable_validator=instance_of(tuple),
+        ),
+    )
+
+    def __call__(self, inst, attr, value):
+        try:
+            self.validator(inst, attr, value)
+        except self.exc_types:
+            pass  # suppress error to invert validity
+        else:
+            raise ValueError(
+                self.msg.format(
+                    validator=self.validator,
+                    exc_types=self.exc_types,
+                ),
+                attr,
+                self.validator,
+                value,
+                self.exc_types,
+            )
+
+    def __repr__(self):
+        return f"<not_ validator wrapping {self.validator!r}, capturing {self.exc_types!r}>"
+
+
+def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
+    """
+    A validator that wraps and logically 'inverts' the validator passed to it.
+    It will raise a `ValueError` if the provided validator *doesn't* raise a
+    `ValueError` or `TypeError` (by default), and will suppress the exception
+    if the provided validator *does*.
+
+    Intended to be used with existing validators to compose logic without
+    needing to create inverted variants, for example, ``not_(in_(...))``.
+
+    Args:
+        validator: A validator to be logically inverted.
+
+        msg (str):
+            Message to raise if validator fails. Formatted with keys
+            ``exc_types`` and ``validator``.
+
+        exc_types (tuple[type, ...]):
+            Exception type(s) to capture. Other types raised by child
+            validators will not be intercepted and pass through.
+
+    Raises:
+        ValueError:
+            With a human readable error message, the attribute (of type
+            `attrs.Attribute`), the validator that failed to raise an
+            exception, the value it got, and the expected exception types.
+
+    .. versionadded:: 22.2.0
+    """
+    try:
+        exc_types = tuple(exc_types)
+    except TypeError:
+        exc_types = (exc_types,)
+    return _NotValidator(validator, msg, exc_types)
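+
+
+# Usage sketch (illustrative, not from the upstream source; the class and
+# field names are invented): `not_(in_(...))` accepts exactly the values the
+# wrapped validator rejects.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Reservation:
+#     ...     name = attr.ib(validator=not_(in_(["admin", "root"])))
+#     >>> Reservation("jane")
+#     Reservation(name='jane')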
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _OrValidator:
+    validators = attrib()
+
+    def __call__(self, inst, attr, value):
+        for v in self.validators:
+            try:
+                v(inst, attr, value)
+            except Exception:  # noqa: BLE001, PERF203, S112
+                continue
+            else:
+                return
+
+        msg = f"None of {self.validators!r} satisfied for value {value!r}"
+        raise ValueError(msg)
+
+    def __repr__(self):
+        return f"<or validator wrapping {self.validators!r}>"
+
+
+def or_(*validators):
+    """
+    A validator that composes multiple validators into one.
+
+    When called on a value, it runs all wrapped validators until one of them is
+    satisfied.
+
+    Args:
+        validators (~collections.abc.Iterable[typing.Callable]):
+            Arbitrary number of validators.
+
+    Raises:
+        ValueError:
+            If no validator is satisfied. Raised with a human-readable error
+            message listing all the wrapped validators and the value that
+            failed all of them.
+
+    .. versionadded:: 24.1.0
+    """
+    vals = []
+    for v in validators:
+        vals.extend(v.validators if isinstance(v, _OrValidator) else [v])
+
+    return _OrValidator(tuple(vals))
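+
+
+# Usage sketch (illustrative, not from the upstream source; the class and
+# field names are invented): the first satisfied validator wins.
+#
+#     >>> import attr
+#     >>> @attr.s
+#     ... class Port:
+#     ...     value = attr.ib(validator=or_(instance_of(int), in_(["auto"])))
+#     >>> Port(8080)
+#     Port(value=8080)
+#     >>> Port("auto")
+#     Port(value='auto')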
diff --git a/.venv/lib/python3.12/site-packages/attr/validators.pyi b/.venv/lib/python3.12/site-packages/attr/validators.pyi
new file mode 100644
index 00000000..a0fdda7c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/attr/validators.pyi
@@ -0,0 +1,86 @@
+from types import UnionType
+from typing import (
+    Any,
+    AnyStr,
+    Callable,
+    Container,
+    ContextManager,
+    Iterable,
+    Mapping,
+    Match,
+    Pattern,
+    TypeVar,
+    overload,
+)
+
+from attrs import _ValidatorType
+from attrs import _ValidatorArgType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+    type: tuple[type[_T1], type[_T2]],
+) -> _ValidatorType[_T1 | _T2]: ...
+@overload
+def instance_of(
+    type: tuple[type[_T1], type[_T2], type[_T3]],
+) -> _ValidatorType[_T1 | _T2 | _T3]: ...
+@overload
+def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
+@overload
+def instance_of(type: UnionType) -> _ValidatorType[Any]: ...
+def optional(
+    validator: (
+        _ValidatorType[_T]
+        | list[_ValidatorType[_T]]
+        | tuple[_ValidatorType[_T]]
+    ),
+) -> _ValidatorType[_T | None]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+    regex: Pattern[AnyStr] | AnyStr,
+    flags: int = ...,
+    func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+    member_validator: _ValidatorArgType[_T],
+    iterable_validator: _ValidatorType[_I] | None = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+    key_validator: _ValidatorType[_K],
+    value_validator: _ValidatorType[_V],
+    mapping_validator: _ValidatorType[_M] | None = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
+def min_len(length: int) -> _ValidatorType[_T]: ...
+def not_(
+    validator: _ValidatorType[_T],
+    *,
+    msg: str | None = None,
+    exc_types: type[Exception] | Iterable[type[Exception]] = ...,
+) -> _ValidatorType[_T]: ...
+def or_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...