Diffstat (limited to '.venv/lib/python3.12/site-packages/setuptools/command')
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/__init__.py | 21
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/_requirestxt.py | 131
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/alias.py | 77
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/bdist_egg.py | 479
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/bdist_rpm.py | 42
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/bdist_wheel.py | 604
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/build.py | 135
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/build_clib.py | 103
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/build_ext.py | 469
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/build_py.py | 400
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/develop.py | 195
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/dist_info.py | 103
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/easy_install.py | 2365
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/editable_wheel.py | 925
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/egg_info.py | 720
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/install.py | 183
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/install_egg_info.py | 58
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/install_lib.py | 137
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/install_scripts.py | 73
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/launcher manifest.xml | 15
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/rotate.py | 65
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/saveopts.py | 21
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/sdist.py | 217
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/setopt.py | 141
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/command/test.py | 45
25 files changed, 7724 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/__init__.py b/.venv/lib/python3.12/site-packages/setuptools/command/__init__.py
new file mode 100644
index 00000000..50e6c2f5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/__init__.py
@@ -0,0 +1,21 @@
+# mypy: disable_error_code=call-overload
+# pyright: reportCallIssue=false, reportArgumentType=false
+# Can't disable on the exact line because distutils doesn't exist on Python 3.12
+# and type-checkers aren't aware of distutils_hack,
+# causing distutils.command.bdist.bdist.format_commands to be Any.
+
+import sys
+
+from distutils.command.bdist import bdist
+
+if 'egg' not in bdist.format_commands:
+    try:
+        # format_commands is a dict in vendored distutils
+        # It used to be a list in older (stdlib) distutils
+        # We support both for backwards compatibility
+        bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
+    except TypeError:
+        bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+        bdist.format_commands.append('egg')
+
+del bdist, sys
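For context, a minimal sketch of the two shapes that ``format_commands`` can take, which the try/except above distinguishes; the sample values are illustrative rather than copied from any distutils release:

# Newer, vendored distutils: format_commands is a dict of
#   format name -> (command name, description), so item assignment works.
format_commands = {'gztar': ('bdist_dumb', "gzip'ed tar file")}
format_commands['egg'] = ('bdist_egg', "Python .egg file")

# Older stdlib distutils: format_commands is a plain list of names, and the
# (command name, description) pairs live in a separate format_command dict.
# Item assignment on the list raises TypeError, which the except branch above
# relies on; the fallback then updates both structures.
format_commands = ['gztar']
format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file")}
format_command['egg'] = ('bdist_egg', "Python .egg file")
format_commands.append('egg')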
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/_requirestxt.py b/.venv/lib/python3.12/site-packages/setuptools/command/_requirestxt.py
new file mode 100644
index 00000000..9029b125
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/_requirestxt.py
@@ -0,0 +1,131 @@
+"""Helper code used to generate ``requires.txt`` files in the egg-info directory.
+
+The ``requires.txt`` file has a specific format:
+    - Environment markers need to be part of the section headers and
+      should not be part of the requirement spec itself.
+
+See https://setuptools.pypa.io/en/latest/deprecated/python_eggs.html#requires-txt
+"""
+
+from __future__ import annotations
+
+import io
+from collections import defaultdict
+from collections.abc import Mapping
+from itertools import filterfalse
+from typing import TypeVar
+
+from jaraco.text import yield_lines
+from packaging.requirements import Requirement
+
+from .. import _reqs
+from .._reqs import _StrOrIter
+
+# dict can work as an ordered set
+_T = TypeVar("_T")
+_Ordered = dict[_T, None]
+
+
+def _prepare(
+    install_requires: _StrOrIter, extras_require: Mapping[str, _StrOrIter]
+) -> tuple[list[str], dict[str, list[str]]]:
+    """Given values for ``install_requires`` and ``extras_require``
+    create modified versions in a way that can be written in ``requires.txt``
+    """
+    extras = _convert_extras_requirements(extras_require)
+    return _move_install_requirements_markers(install_requires, extras)
+
+
+def _convert_extras_requirements(
+    extras_require: Mapping[str, _StrOrIter],
+) -> defaultdict[str, _Ordered[Requirement]]:
+    """
+    Convert requirements in `extras_require` of the form
+    `"extra": ["barbazquux; {marker}"]` to
+    `"extra:{marker}": ["barbazquux"]`.
+    """
+    output = defaultdict[str, _Ordered[Requirement]](dict)
+    for section, v in extras_require.items():
+        # Do not strip empty sections.
+        output[section]
+        for r in _reqs.parse(v):
+            output[section + _suffix_for(r)].setdefault(r)
+
+    return output
+
+
+def _move_install_requirements_markers(
+    install_requires: _StrOrIter, extras_require: Mapping[str, _Ordered[Requirement]]
+) -> tuple[list[str], dict[str, list[str]]]:
+    """
+    The ``requires.txt`` file has a specific format:
+        - Environment markers need to be part of the section headers and
+          should not be part of the requirement spec itself.
+
+    Move requirements from ``install_requires`` that use environment
+    markers into ``extras_require``.
+    """
+
+    # divide the install_requires into two sets, simple ones still
+    # handled by install_requires and more complex ones handled by extras_require.
+
+    inst_reqs = list(_reqs.parse(install_requires))
+    simple_reqs = filter(_no_marker, inst_reqs)
+    complex_reqs = filterfalse(_no_marker, inst_reqs)
+    simple_install_requires = list(map(str, simple_reqs))
+
+    for r in complex_reqs:
+        extras_require[':' + str(r.marker)].setdefault(r)
+
+    expanded_extras = dict(
+        # list(dict.fromkeys(...))  ensures a list of unique strings
+        (k, list(dict.fromkeys(str(r) for r in map(_clean_req, v))))
+        for k, v in extras_require.items()
+    )
+
+    return simple_install_requires, expanded_extras
+
+
+def _suffix_for(req):
+    """Return the 'extras_require' suffix for a given requirement."""
+    return ':' + str(req.marker) if req.marker else ''
+
+
+def _clean_req(req):
+    """Given a Requirement, remove environment markers and return it"""
+    r = Requirement(str(req))  # create a copy before modifying
+    r.marker = None
+    return r
+
+
+def _no_marker(req):
+    return not req.marker
+
+
+def _write_requirements(stream, reqs):
+    lines = yield_lines(reqs or ())
+
+    def append_cr(line):
+        return line + '\n'
+
+    lines = map(append_cr, lines)
+    stream.writelines(lines)
+
+
+def write_requirements(cmd, basename, filename):
+    dist = cmd.distribution
+    data = io.StringIO()
+    install_requires, extras_require = _prepare(
+        dist.install_requires or (), dist.extras_require or {}
+    )
+    _write_requirements(data, install_requires)
+    for extra in sorted(extras_require):
+        data.write('\n[{extra}]\n'.format(**vars()))
+        _write_requirements(data, extras_require[extra])
+    cmd.write_or_delete_file("requirements", filename, data.getvalue())
+
+
+def write_setup_requirements(cmd, basename, filename):
+    data = io.StringIO()
+    _write_requirements(data, cmd.distribution.setup_requires)
+    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
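To make the format described in the module docstring concrete, here is a small sketch of what the private ``_prepare`` helper returns for made-up inputs, and the ``requires.txt`` content that ``write_requirements`` would then emit (package names and markers are invented for illustration):

from setuptools.command._requirestxt import _prepare

install_requires = ["packaging>=24", 'tomli>=2; python_version < "3.11"']
extras_require = {"test": ['pytest; sys_platform != "win32"']}

simple, extras = _prepare(install_requires, extras_require)
# simple == ['packaging>=24']
# extras == {
#     'test': [],
#     'test:sys_platform != "win32"': ['pytest'],
#     ':python_version < "3.11"': ['tomli>=2'],
# }
#
# write_requirements() would then produce a requires.txt along these lines:
#
#     packaging>=24
#
#     [:python_version < "3.11"]
#     tomli>=2
#
#     [test]
#
#     [test:sys_platform != "win32"]
#     pytest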
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/alias.py b/.venv/lib/python3.12/site-packages/setuptools/command/alias.py
new file mode 100644
index 00000000..b8d74af7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/alias.py
@@ -0,0 +1,77 @@
+from setuptools.command.setopt import config_file, edit_config, option_base
+
+from distutils.errors import DistutilsOptionError
+
+
+def shquote(arg):
+    """Quote an argument for later parsing by shlex.split()"""
+    for c in '"', "'", "\\", "#":
+        if c in arg:
+            return repr(arg)
+    if arg.split() != [arg]:
+        return repr(arg)
+    return arg
+
+
+class alias(option_base):
+    """Define a shortcut that invokes one or more commands"""
+
+    description = "define a shortcut to invoke one or more commands"
+    command_consumes_arguments = True
+
+    user_options = [
+        ('remove', 'r', 'remove (unset) the alias'),
+    ] + option_base.user_options
+
+    boolean_options = option_base.boolean_options + ['remove']
+
+    def initialize_options(self):
+        option_base.initialize_options(self)
+        self.args = None
+        self.remove = None
+
+    def finalize_options(self) -> None:
+        option_base.finalize_options(self)
+        if self.remove and len(self.args) != 1:
+            raise DistutilsOptionError(
+                "Must specify exactly one argument (the alias name) when using --remove"
+            )
+
+    def run(self) -> None:
+        aliases = self.distribution.get_option_dict('aliases')
+
+        if not self.args:
+            print("Command Aliases")
+            print("---------------")
+            for alias in aliases:
+                print("setup.py alias", format_alias(alias, aliases))
+            return
+
+        elif len(self.args) == 1:
+            (alias,) = self.args
+            if self.remove:
+                command = None
+            elif alias in aliases:
+                print("setup.py alias", format_alias(alias, aliases))
+                return
+            else:
+                print(f"No alias definition found for {alias!r}")
+                return
+        else:
+            alias = self.args[0]
+            command = ' '.join(map(shquote, self.args[1:]))
+
+        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
+
+
+def format_alias(name, aliases):
+    source, command = aliases[name]
+    if source == config_file('global'):
+        source = '--global-config '
+    elif source == config_file('user'):
+        source = '--user-config '
+    elif source == config_file('local'):
+        source = ''
+    else:
+        source = f'--filename={source!r}'
+    return source + name + ' ' + command
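A quick illustration of ``shquote``, which decides whether an argument needs quoting before the alias is stored in the ``[aliases]`` section of ``setup.cfg``; the arguments below are made up:

from setuptools.command.alias import shquote

print(shquote("build_ext"))      # build_ext         (plain token, returned unchanged)
print(shquote("--define=X Y"))   # '--define=X Y'    (contains whitespace, repr()-quoted)
print(shquote('say "hi"'))       # 'say "hi"'        (contains a quote, repr()-quoted)

A hypothetical session would be ``python setup.py alias rebuild clean bdist_egg``, which stores ``rebuild = clean bdist_egg`` under ``[aliases]``; running ``python setup.py rebuild`` afterwards expands to those commands.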
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/bdist_egg.py b/.venv/lib/python3.12/site-packages/setuptools/command/bdist_egg.py
new file mode 100644
index 00000000..7f66c3ba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/bdist_egg.py
@@ -0,0 +1,479 @@
+"""setuptools.command.bdist_egg
+
+Build .egg distributions"""
+
+from __future__ import annotations
+
+import marshal
+import os
+import re
+import sys
+import textwrap
+from sysconfig import get_path, get_python_version
+from types import CodeType
+from typing import TYPE_CHECKING, Literal
+
+from setuptools import Command
+from setuptools.extension import Library
+
+from .._path import StrPathT, ensure_directory
+
+from distutils import log
+from distutils.dir_util import mkpath, remove_tree
+
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
+# Same as zipfile._ZipFileMode from typeshed
+_ZipFileMode: TypeAlias = Literal["r", "w", "x", "a"]
+
+
+def _get_purelib():
+    return get_path("purelib")
+
+
+def strip_module(filename):
+    if '.' in filename:
+        filename = os.path.splitext(filename)[0]
+    if filename.endswith('module'):
+        filename = filename[:-6]
+    return filename
+
+
+def sorted_walk(dir):
+    """Do os.walk in a reproducible way,
+    independent of nondeterministic filesystem readdir order
+    """
+    for base, dirs, files in os.walk(dir):
+        dirs.sort()
+        files.sort()
+        yield base, dirs, files
+
+
+def write_stub(resource, pyfile) -> None:
+    _stub_template = textwrap.dedent(
+        """
+        def __bootstrap__():
+            global __bootstrap__, __loader__, __file__
+            import sys, pkg_resources, importlib.util
+            __file__ = pkg_resources.resource_filename(__name__, %r)
+            __loader__ = None; del __bootstrap__, __loader__
+            spec = importlib.util.spec_from_file_location(__name__,__file__)
+            mod = importlib.util.module_from_spec(spec)
+            spec.loader.exec_module(mod)
+        __bootstrap__()
+        """
+    ).lstrip()
+    with open(pyfile, 'w', encoding="utf-8") as f:
+        f.write(_stub_template % resource)
+
+
+class bdist_egg(Command):
+    description = 'create an "egg" distribution'
+
+    user_options = [
+        ('bdist-dir=', 'b', "temporary directory for creating the distribution"),
+        (
+            'plat-name=',
+            'p',
+            "platform name to embed in generated filenames "
+            "(by default uses `pkg_resources.get_build_platform()`)",
+        ),
+        ('exclude-source-files', None, "remove all .py files from the generated egg"),
+        (
+            'keep-temp',
+            'k',
+            "keep the pseudo-installation tree around after "
+            "creating the distribution archive",
+        ),
+        ('dist-dir=', 'd', "directory to put final built distributions in"),
+        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
+    ]
+
+    boolean_options = ['keep-temp', 'skip-build', 'exclude-source-files']
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = False
+        self.dist_dir = None
+        self.skip_build = False
+        self.egg_output = None
+        self.exclude_source_files = None
+
+    def finalize_options(self) -> None:
+        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
+        self.egg_info = ei_cmd.egg_info
+
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'egg')
+
+        if self.plat_name is None:
+            from pkg_resources import get_build_platform
+
+            self.plat_name = get_build_platform()
+
+        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+
+        if self.egg_output is None:
+            # Compute filename of the output egg
+            basename = ei_cmd._get_egg_basename(
+                py_version=get_python_version(),
+                platform=self.distribution.has_ext_modules() and self.plat_name,
+            )
+
+            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
+
+    def do_install_data(self) -> None:
+        # Hack for packages that install data to install's --install-lib
+        self.get_finalized_command('install').install_lib = self.bdist_dir
+
+        site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
+        old, self.distribution.data_files = self.distribution.data_files, []
+
+        for item in old:
+            if isinstance(item, tuple) and len(item) == 2:
+                if os.path.isabs(item[0]):
+                    realpath = os.path.realpath(item[0])
+                    normalized = os.path.normcase(realpath)
+                    if normalized == site_packages or normalized.startswith(
+                        site_packages + os.sep
+                    ):
+                        item = realpath[len(site_packages) + 1 :], item[1]
+                        # XXX else: raise ???
+            self.distribution.data_files.append(item)
+
+        try:
+            log.info("installing package data to %s", self.bdist_dir)
+            self.call_command('install_data', force=False, root=None)
+        finally:
+            self.distribution.data_files = old
+
+    def get_outputs(self):
+        return [self.egg_output]
+
+    def call_command(self, cmdname, **kw):
+        """Invoke reinitialized command `cmdname` with keyword args"""
+        for dirname in INSTALL_DIRECTORY_ATTRS:
+            kw.setdefault(dirname, self.bdist_dir)
+        kw.setdefault('skip_build', self.skip_build)
+        kw.setdefault('dry_run', self.dry_run)
+        cmd = self.reinitialize_command(cmdname, **kw)
+        self.run_command(cmdname)
+        return cmd
+
+    def run(self):  # noqa: C901  # is too complex (14)  # FIXME
+        # Generate metadata first
+        self.run_command("egg_info")
+        # We run install_lib before install_data, because some data hacks
+        # pull their data path from the install_lib command.
+        log.info("installing library code to %s", self.bdist_dir)
+        instcmd = self.get_finalized_command('install')
+        old_root = instcmd.root
+        instcmd.root = None
+        if self.distribution.has_c_libraries() and not self.skip_build:
+            self.run_command('build_clib')
+        cmd = self.call_command('install_lib', warn_dir=False)
+        instcmd.root = old_root
+
+        all_outputs, ext_outputs = self.get_ext_outputs()
+        self.stubs = []
+        to_compile = []
+        for p, ext_name in enumerate(ext_outputs):
+            filename, _ext = os.path.splitext(ext_name)
+            pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py')
+            self.stubs.append(pyfile)
+            log.info("creating stub loader for %s", ext_name)
+            if not self.dry_run:
+                write_stub(os.path.basename(ext_name), pyfile)
+            to_compile.append(pyfile)
+            ext_outputs[p] = ext_name.replace(os.sep, '/')
+
+        if to_compile:
+            cmd.byte_compile(to_compile)
+        if self.distribution.data_files:
+            self.do_install_data()
+
+        # Make the EGG-INFO directory
+        archive_root = self.bdist_dir
+        egg_info = os.path.join(archive_root, 'EGG-INFO')
+        self.mkpath(egg_info)
+        if self.distribution.scripts:
+            script_dir = os.path.join(egg_info, 'scripts')
+            log.info("installing scripts to %s", script_dir)
+            self.call_command('install_scripts', install_dir=script_dir, no_ep=True)
+
+        self.copy_metadata_to(egg_info)
+        native_libs = os.path.join(egg_info, "native_libs.txt")
+        if all_outputs:
+            log.info("writing %s", native_libs)
+            if not self.dry_run:
+                ensure_directory(native_libs)
+                with open(native_libs, 'wt', encoding="utf-8") as libs_file:
+                    libs_file.write('\n'.join(all_outputs))
+                    libs_file.write('\n')
+        elif os.path.isfile(native_libs):
+            log.info("removing %s", native_libs)
+            if not self.dry_run:
+                os.unlink(native_libs)
+
+        write_safety_flag(os.path.join(archive_root, 'EGG-INFO'), self.zip_safe())
+
+        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
+            log.warn(
+                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
+                "Use the install_requires/extras_require setup() args instead."
+            )
+
+        if self.exclude_source_files:
+            self.zap_pyfiles()
+
+        # Make the archive
+        make_zipfile(
+            self.egg_output,
+            archive_root,
+            verbose=self.verbose,
+            dry_run=self.dry_run,
+            mode=self.gen_header(),
+        )
+        if not self.keep_temp:
+            remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+        # Add to 'Distribution.dist_files' so that the "upload" command works
+        getattr(self.distribution, 'dist_files', []).append((
+            'bdist_egg',
+            get_python_version(),
+            self.egg_output,
+        ))
+
+    def zap_pyfiles(self):
+        log.info("Removing .py files from temporary directory")
+        for base, dirs, files in walk_egg(self.bdist_dir):
+            for name in files:
+                path = os.path.join(base, name)
+
+                if name.endswith('.py'):
+                    log.debug("Deleting %s", path)
+                    os.unlink(path)
+
+                if base.endswith('__pycache__'):
+                    path_old = path
+
+                    pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
+                    m = re.match(pattern, name)
+                    path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
+                    log.info(f"Renaming file from [{path_old}] to [{path_new}]")
+                    try:
+                        os.remove(path_new)
+                    except OSError:
+                        pass
+                    os.rename(path_old, path_new)
+
+    def zip_safe(self):
+        safe = getattr(self.distribution, 'zip_safe', None)
+        if safe is not None:
+            return safe
+        log.warn("zip_safe flag not set; analyzing archive contents...")
+        return analyze_egg(self.bdist_dir, self.stubs)
+
+    def gen_header(self) -> Literal["w"]:
+        return 'w'
+
+    def copy_metadata_to(self, target_dir) -> None:
+        "Copy metadata (egg info) to the target_dir"
+        # normalize the path (so that a forward-slash in egg_info will
+        # match using startswith below)
+        norm_egg_info = os.path.normpath(self.egg_info)
+        prefix = os.path.join(norm_egg_info, '')
+        for path in self.ei_cmd.filelist.files:
+            if path.startswith(prefix):
+                target = os.path.join(target_dir, path[len(prefix) :])
+                ensure_directory(target)
+                self.copy_file(path, target)
+
+    def get_ext_outputs(self):
+        """Get a list of relative paths to C extensions in the output distro"""
+
+        all_outputs = []
+        ext_outputs = []
+
+        paths = {self.bdist_dir: ''}
+        for base, dirs, files in sorted_walk(self.bdist_dir):
+            all_outputs.extend(
+                paths[base] + filename
+                for filename in files
+                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS
+            )
+            for filename in dirs:
+                paths[os.path.join(base, filename)] = paths[base] + filename + '/'
+
+        if self.distribution.has_ext_modules():
+            build_cmd = self.get_finalized_command('build_ext')
+            for ext in build_cmd.extensions:
+                if isinstance(ext, Library):
+                    continue
+                fullname = build_cmd.get_ext_fullname(ext.name)
+                filename = build_cmd.get_ext_filename(fullname)
+                if not os.path.basename(filename).startswith('dl-'):
+                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
+                        ext_outputs.append(filename)
+
+        return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS: dict[str, None] = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+def walk_egg(egg_dir):
+    """Walk an unpacked egg's contents, skipping the metadata directory"""
+    walker = sorted_walk(egg_dir)
+    base, dirs, files = next(walker)
+    if 'EGG-INFO' in dirs:
+        dirs.remove('EGG-INFO')
+    yield base, dirs, files
+    yield from walker
+
+
+def analyze_egg(egg_dir, stubs):
+    # check for existing flag in EGG-INFO
+    for flag, fn in safety_flags.items():
+        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
+            return flag
+    if not can_scan():
+        return False
+    safe = True
+    for base, dirs, files in walk_egg(egg_dir):
+        for name in files:
+            if name.endswith('.py') or name.endswith('.pyw'):
+                continue
+            elif name.endswith('.pyc') or name.endswith('.pyo'):
+                # always scan, even if we already know we're not safe
+                safe = scan_module(egg_dir, base, name, stubs) and safe
+    return safe
+
+
+def write_safety_flag(egg_dir, safe) -> None:
+    # Write or remove zip safety flag file(s)
+    for flag, fn in safety_flags.items():
+        fn = os.path.join(egg_dir, fn)
+        if os.path.exists(fn):
+            if safe is None or bool(safe) != flag:
+                os.unlink(fn)
+        elif safe is not None and bool(safe) == flag:
+            with open(fn, 'wt', encoding="utf-8") as f:
+                f.write('\n')
+
+
+safety_flags = {
+    True: 'zip-safe',
+    False: 'not-zip-safe',
+}
+
+
+def scan_module(egg_dir, base, name, stubs):
+    """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+    filename = os.path.join(base, name)
+    if filename[:-1] in stubs:
+        return True  # Extension module
+    pkg = base[len(egg_dir) + 1 :].replace(os.sep, '.')
+    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
+    skip = 16  # skip magic & reserved? & date & file size
+    f = open(filename, 'rb')
+    f.read(skip)
+    code = marshal.load(f)
+    f.close()
+    safe = True
+    symbols = dict.fromkeys(iter_symbols(code))
+    for bad in ['__file__', '__path__']:
+        if bad in symbols:
+            log.warn("%s: module references %s", module, bad)
+            safe = False
+    if 'inspect' in symbols:
+        for bad in [
+            'getsource',
+            'getabsfile',
+            'getfile',
+            'getsourcefile',
+            'getsourcelines',
+            'findsource',
+            'getcomments',
+            'getframeinfo',
+            'getinnerframes',
+            'getouterframes',
+            'stack',
+            'trace',
+        ]:
+            if bad in symbols:
+                log.warn("%s: module MAY be using inspect.%s", module, bad)
+                safe = False
+    return safe
+
+
+def iter_symbols(code):
+    """Yield names and strings used by `code` and its nested code objects"""
+    yield from code.co_names
+    for const in code.co_consts:
+        if isinstance(const, str):
+            yield const
+        elif isinstance(const, CodeType):
+            yield from iter_symbols(const)
+
+
+def can_scan() -> bool:
+    if not sys.platform.startswith('java') and sys.platform != 'cli':
+        # CPython, PyPy, etc.
+        return True
+    log.warn("Unable to analyze compiled code on this platform.")
+    log.warn(
+        "Please ask the author to include a 'zip_safe'"
+        " setting (either True or False) in the package's setup.py"
+    )
+    return False
+
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = ['install_lib', 'install_dir', 'install_data', 'install_base']
+
+
+def make_zipfile(
+    zip_filename: StrPathT,
+    base_dir,
+    verbose: bool = False,
+    dry_run: bool = False,
+    compress=True,
+    mode: _ZipFileMode = 'w',
+) -> StrPathT:
+    """Create a zip file from all the files under 'base_dir'.  The output
+    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
+    Python module (if available) or the InfoZIP "zip" utility (if installed
+    and found on the default search path).  If neither tool is available,
+    raises DistutilsExecError.  Returns the name of the output zip file.
+    """
+    import zipfile
+
+    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)  # type: ignore[arg-type] # python/mypy#18075
+    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+    def visit(z, dirname, names):
+        for name in names:
+            path = os.path.normpath(os.path.join(dirname, name))
+            if os.path.isfile(path):
+                p = path[len(base_dir) + 1 :]
+                if not dry_run:
+                    z.write(path, p)
+                log.debug("adding '%s'", p)
+
+    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+    if not dry_run:
+        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
+        for dirname, dirs, files in sorted_walk(base_dir):
+            visit(z, dirname, files)
+        z.close()
+    else:
+        for dirname, dirs, files in sorted_walk(base_dir):
+            visit(None, dirname, files)
+    return zip_filename
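The zip-safety analysis above ultimately reduces to scanning compiled modules for suspicious names. A minimal sketch of that idea using ``iter_symbols`` on a throwaway code object (the compiled snippet is made up):

from setuptools.command.bdist_egg import iter_symbols

code = compile("import inspect\nprint(__file__)", "<demo>", "exec")
suspicious = {"__file__", "__path__"}
print(sorted(suspicious.intersection(iter_symbols(code))))
# ['__file__']  -> scan_module() would log a warning and mark the module not zip-safe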
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/bdist_rpm.py b/.venv/lib/python3.12/site-packages/setuptools/command/bdist_rpm.py
new file mode 100644
index 00000000..6dbb2700
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/bdist_rpm.py
@@ -0,0 +1,42 @@
+from ..dist import Distribution
+from ..warnings import SetuptoolsDeprecationWarning
+
+import distutils.command.bdist_rpm as orig
+
+
+class bdist_rpm(orig.bdist_rpm):
+    """
+    Override the default bdist_rpm behavior to do the following:
+
+    1. Run egg_info to ensure the name and version are properly calculated.
+    2. Always run 'install' using --single-version-externally-managed to
+       disable eggs in RPM distributions.
+    """
+
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
+    def run(self) -> None:
+        SetuptoolsDeprecationWarning.emit(
+            "Deprecated command",
+            """
+            bdist_rpm is deprecated and will be removed in a future version.
+            Use bdist_wheel (wheel packages) instead.
+            """,
+            see_url="https://github.com/pypa/setuptools/issues/1988",
+            due_date=(2023, 10, 30),  # Deprecation introduced in 22 Oct 2021.
+        )
+
+        # ensure distro name is up-to-date
+        self.run_command('egg_info')
+
+        orig.bdist_rpm.run(self)
+
+    def _make_spec_file(self):
+        spec = orig.bdist_rpm._make_spec_file(self)
+        return [
+            line.replace(
+                "setup.py install ",
+                "setup.py install --single-version-externally-managed ",
+            ).replace("%setup", "%setup -n %{name}-%{unmangled_version}")
+            for line in spec
+        ]
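A sketch of the string rewriting that ``_make_spec_file`` applies to the generated spec; the spec line below is a made-up example, not taken from an actual RPM spec:

line = "%{__python} setup.py install --root=%{buildroot}"
print(line.replace(
    "setup.py install ",
    "setup.py install --single-version-externally-managed ",
))
# %{__python} setup.py install --single-version-externally-managed --root=%{buildroot}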
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/bdist_wheel.py b/.venv/lib/python3.12/site-packages/setuptools/command/bdist_wheel.py
new file mode 100644
index 00000000..1e3f637b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/bdist_wheel.py
@@ -0,0 +1,604 @@
+"""
+Create a wheel (.whl) distribution.
+
+A wheel is a built archive format.
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import struct
+import sys
+import sysconfig
+import warnings
+from collections.abc import Iterable, Sequence
+from email.generator import BytesGenerator
+from glob import iglob
+from typing import Literal, cast
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+from packaging import tags, version as _packaging_version
+from wheel.wheelfile import WheelFile
+
+from .. import Command, __version__, _shutil
+from .._core_metadata import _safe_license_file
+from .._normalization import safer_name
+from ..warnings import SetuptoolsDeprecationWarning
+from .egg_info import egg_info as egg_info_cls
+
+from distutils import log
+
+
+def safe_version(version: str) -> str:
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(_packaging_version.Version(version))
+    except _packaging_version.InvalidVersion:
+        version = version.replace(" ", ".")
+        return re.sub("[^A-Za-z0-9.]+", "-", version)
+
+
+setuptools_major_version = int(__version__.split(".")[0])
+
+PY_LIMITED_API_PATTERN = r"cp3\d"
+
+
+def _is_32bit_interpreter() -> bool:
+    return struct.calcsize("P") == 4
+
+
+def python_tag() -> str:
+    return f"py{sys.version_info.major}"
+
+
+def get_platform(archive_root: str | None) -> str:
+    """Return our platform name 'win32', 'linux_x86_64'"""
+    result = sysconfig.get_platform()
+    if result.startswith("macosx") and archive_root is not None:  # pragma: no cover
+        from wheel.macosx_libfile import calculate_macosx_platform_tag
+
+        result = calculate_macosx_platform_tag(archive_root, result)
+    elif _is_32bit_interpreter():
+        if result == "linux-x86_64":
+            # pip pull request #3497
+            result = "linux-i686"
+        elif result == "linux-aarch64":
+            # packaging pull request #234
+            # TODO armv8l, packaging pull request #690 => this did not land
+            # in pip/packaging yet
+            result = "linux-armv7l"
+
+    return result.replace("-", "_")
+
+
+def get_flag(
+    var: str, fallback: bool, expected: bool = True, warn: bool = True
+) -> bool:
+    """Use a fallback value for determining SOABI flags if the needed config
+    var is unset or unavailable."""
+    val = sysconfig.get_config_var(var)
+    if val is None:
+        if warn:
+            warnings.warn(
+                f"Config variable '{var}' is unset, Python ABI tag may be incorrect",
+                RuntimeWarning,
+                stacklevel=2,
+            )
+        return fallback
+    return val == expected
+
+
+def get_abi_tag() -> str | None:
+    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
+    soabi: str = sysconfig.get_config_var("SOABI")
+    impl = tags.interpreter_name()
+    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
+        d = ""
+        u = ""
+        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
+            d = "d"
+
+        abi = f"{impl}{tags.interpreter_version()}{d}{u}"
+    elif soabi and impl == "cp" and soabi.startswith("cpython"):
+        # non-Windows
+        abi = "cp" + soabi.split("-")[1]
+    elif soabi and impl == "cp" and soabi.startswith("cp"):
+        # Windows
+        abi = soabi.split("-")[0]
+        if hasattr(sys, "gettotalrefcount"):
+            # using debug build; append "d" flag
+            abi += "d"
+    elif soabi and impl == "pp":
+        # we want something like pypy36-pp73
+        abi = "-".join(soabi.split("-")[:2])
+        abi = abi.replace(".", "_").replace("-", "_")
+    elif soabi and impl == "graalpy":
+        abi = "-".join(soabi.split("-")[:3])
+        abi = abi.replace(".", "_").replace("-", "_")
+    elif soabi:
+        abi = soabi.replace(".", "_").replace("-", "_")
+    else:
+        abi = None
+
+    return abi
+
+
+def safer_version(version: str) -> str:
+    return safe_version(version).replace("-", "_")
+
+
+class bdist_wheel(Command):
+    description = "create a wheel distribution"
+
+    supported_compressions = {
+        "stored": ZIP_STORED,
+        "deflated": ZIP_DEFLATED,
+    }
+
+    user_options = [
+        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
+        (
+            "plat-name=",
+            "p",
+            "platform name to embed in generated filenames "
+            f"[default: {get_platform(None)}]",
+        ),
+        (
+            "keep-temp",
+            "k",
+            "keep the pseudo-installation tree around after "
+            "creating the distribution archive",
+        ),
+        ("dist-dir=", "d", "directory to put final built distributions in"),
+        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
+        (
+            "relative",
+            None,
+            "build the archive using relative paths [default: false]",
+        ),
+        (
+            "owner=",
+            "u",
+            "Owner name used when creating a tar file [default: current user]",
+        ),
+        (
+            "group=",
+            "g",
+            "Group name used when creating a tar file [default: current group]",
+        ),
+        ("universal", None, "*DEPRECATED* make a universal wheel [default: false]"),
+        (
+            "compression=",
+            None,
+            f"zipfile compression (one of: {', '.join(supported_compressions)}) [default: 'deflated']",
+        ),
+        (
+            "python-tag=",
+            None,
+            f"Python implementation compatibility tag [default: '{python_tag()}']",
+        ),
+        (
+            "build-number=",
+            None,
+            "Build number for this particular version. "
+            "As specified in PEP-0427, this must start with a digit. "
+            "[default: None]",
+        ),
+        (
+            "py-limited-api=",
+            None,
+            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag [default: false]",
+        ),
+        (
+            "dist-info-dir=",
+            None,
+            "directory where a pre-generated dist-info can be found (e.g. as a "
+            "result of calling the PEP517 'prepare_metadata_for_build_wheel' "
+            "method)",
+        ),
+    ]
+
+    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
+
+    def initialize_options(self) -> None:
+        self.bdist_dir: str | None = None
+        self.data_dir = ""
+        self.plat_name: str | None = None
+        self.plat_tag: str | None = None
+        self.format = "zip"
+        self.keep_temp = False
+        self.dist_dir: str | None = None
+        self.dist_info_dir = None
+        self.egginfo_dir: str | None = None
+        self.root_is_pure: bool | None = None
+        self.skip_build = False
+        self.relative = False
+        self.owner = None
+        self.group = None
+        self.universal = False
+        self.compression: str | int = "deflated"
+        self.python_tag = python_tag()
+        self.build_number: str | None = None
+        self.py_limited_api: str | Literal[False] = False
+        self.plat_name_supplied = False
+
+    def finalize_options(self) -> None:
+        if not self.bdist_dir:
+            bdist_base = self.get_finalized_command("bdist").bdist_base
+            self.bdist_dir = os.path.join(bdist_base, "wheel")
+
+        if self.dist_info_dir is None:
+            egg_info = cast(egg_info_cls, self.distribution.get_command_obj("egg_info"))
+            egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
+
+        self.data_dir = self.wheel_dist_name + ".data"
+        self.plat_name_supplied = bool(self.plat_name)
+
+        need_options = ("dist_dir", "plat_name", "skip_build")
+
+        self.set_undefined_options("bdist", *zip(need_options, need_options))
+
+        self.root_is_pure = not (
+            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
+        )
+
+        self._validate_py_limited_api()
+
+        # Support legacy [wheel] section for setting universal
+        wheel = self.distribution.get_option_dict("wheel")
+        if "universal" in wheel:  # pragma: no cover
+            # please don't define this in your global configs
+            log.warn("The [wheel] section is deprecated. Use [bdist_wheel] instead.")
+            val = wheel["universal"][1].strip()
+            if val.lower() in ("1", "true", "yes"):
+                self.universal = True
+
+        if self.universal:
+            SetuptoolsDeprecationWarning.emit(
+                "bdist_wheel.universal is deprecated",
+                """
+                With Python 2.7 end-of-life, support for building universal wheels
+                (i.e., wheels that support both Python 2 and Python 3)
+                is being obviated.
+                Please discontinue using this option, or if you still need it,
+                file an issue with pypa/setuptools describing your use case.
+                """,
+                due_date=(2025, 8, 30),  # Introduced in 2024-08-30
+            )
+
+        if self.build_number is not None and not self.build_number[:1].isdigit():
+            raise ValueError("Build tag (build-number) must start with a digit.")
+
+    def _validate_py_limited_api(self) -> None:
+        if not self.py_limited_api:
+            return
+
+        if not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):
+            raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
+
+        if sysconfig.get_config_var("Py_GIL_DISABLED"):
+            raise ValueError(
+                f"`py_limited_api={self.py_limited_api!r}` not supported. "
+                "`Py_LIMITED_API` is currently incompatible with "
+                "`Py_GIL_DISABLED`."
+                "See https://github.com/python/cpython/issues/111506."
+            )
+
+    @property
+    def wheel_dist_name(self) -> str:
+        """Return distribution full name with - replaced with _"""
+        components = [
+            safer_name(self.distribution.get_name()),
+            safer_version(self.distribution.get_version()),
+        ]
+        if self.build_number:
+            components.append(self.build_number)
+        return "-".join(components)
+
+    def get_tag(self) -> tuple[str, str, str]:
+        # bdist sets self.plat_name if unset; we should only use it for pure-Python
+        # wheels if the user supplied it.
+        if self.plat_name_supplied and self.plat_name:
+            plat_name = self.plat_name
+        elif self.root_is_pure:
+            plat_name = "any"
+        else:
+            # macosx embeds the system version in the platform name, so it needs
+            # special handling
+            if self.plat_name and not self.plat_name.startswith("macosx"):
+                plat_name = self.plat_name
+            else:
+                # on macosx always limit the platform name to comply with any
+                # c-extension modules in bdist_dir, since the user can specify
+                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake
+
+                # on other platforms, and on macosx if there are no c-extension
+                # modules, use the default platform name.
+                plat_name = get_platform(self.bdist_dir)
+
+            if _is_32bit_interpreter():
+                if plat_name in ("linux-x86_64", "linux_x86_64"):
+                    plat_name = "linux_i686"
+                if plat_name in ("linux-aarch64", "linux_aarch64"):
+                    # TODO armv8l, packaging pull request #690 => this did not land
+                    # in pip/packaging yet
+                    plat_name = "linux_armv7l"
+
+        plat_name = (
+            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
+        )
+
+        if self.root_is_pure:
+            if self.universal:
+                impl = "py2.py3"
+            else:
+                impl = self.python_tag
+            tag = (impl, "none", plat_name)
+        else:
+            impl_name = tags.interpreter_name()
+            impl_ver = tags.interpreter_version()
+            impl = impl_name + impl_ver
+            # We don't work on CPython 3.1, 3.0.
+            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
+                impl = self.py_limited_api
+                abi_tag = "abi3"
+            else:
+                abi_tag = str(get_abi_tag()).lower()
+            tag = (impl, abi_tag, plat_name)
+            # issue gh-374: allow overriding plat_name
+            supported_tags = [
+                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
+            ]
+            assert tag in supported_tags, (
+                f"would build wheel with unsupported tag {tag}"
+            )
+        return tag
+
+    def run(self):
+        build_scripts = self.reinitialize_command("build_scripts")
+        build_scripts.executable = "python"
+        build_scripts.force = True
+
+        build_ext = self.reinitialize_command("build_ext")
+        build_ext.inplace = False
+
+        if not self.skip_build:
+            self.run_command("build")
+
+        install = self.reinitialize_command("install", reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.compile = False
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        # A wheel without setuptools scripts is more cross-platform.
+        # Use the (undocumented) `no_ep` option to setuptools'
+        # install_scripts command to avoid creating entry point scripts.
+        install_scripts = self.reinitialize_command("install_scripts")
+        install_scripts.no_ep = True
+
+        # Use a custom scheme for the archive, because we have to decide
+        # at installation time which scheme to use.
+        for key in ("headers", "scripts", "data", "purelib", "platlib"):
+            setattr(install, "install_" + key, os.path.join(self.data_dir, key))
+
+        basedir_observed = ""
+
+        if os.name == "nt":
+            # win32 barfs if any of these are ''; could be '.'?
+            # (distutils.command.install:change_roots bug)
+            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
+            self.install_libbase = self.install_lib = basedir_observed
+
+        setattr(
+            install,
+            "install_purelib" if self.root_is_pure else "install_platlib",
+            basedir_observed,
+        )
+
+        log.info(f"installing to {self.bdist_dir}")
+
+        self.run_command("install")
+
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            archive_root = os.path.join(
+                self.bdist_dir, self._ensure_relative(install.install_base)
+            )
+
+        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
+        distinfo_dirname = (
+            f"{safer_name(self.distribution.get_name())}-"
+            f"{safer_version(self.distribution.get_version())}.dist-info"
+        )
+        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
+        if self.dist_info_dir:
+            # Use the given dist-info directly.
+            log.debug(f"reusing {self.dist_info_dir}")
+            shutil.copytree(self.dist_info_dir, distinfo_dir)
+            # Egg info is still generated, so remove it now to avoid it getting
+            # copied into the wheel.
+            _shutil.rmtree(self.egginfo_dir)
+        else:
+            # Convert the generated egg-info into dist-info.
+            self.egg2dist(self.egginfo_dir, distinfo_dir)
+
+        self.write_wheelfile(distinfo_dir)
+
+        # Make the archive
+        if not os.path.exists(self.dist_dir):
+            os.makedirs(self.dist_dir)
+
+        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
+        with WheelFile(wheel_path, "w", self._zip_compression()) as wf:
+            wf.write_files(archive_root)
+
+        # Add to 'Distribution.dist_files' so that the "upload" command works
+        getattr(self.distribution, "dist_files", []).append((
+            "bdist_wheel",
+            f"{sys.version_info.major}.{sys.version_info.minor}",
+            wheel_path,
+        ))
+
+        if not self.keep_temp:
+            log.info(f"removing {self.bdist_dir}")
+            if not self.dry_run:
+                _shutil.rmtree(self.bdist_dir)
+
+    def write_wheelfile(
+        self, wheelfile_base: str, generator: str = f"setuptools ({__version__})"
+    ) -> None:
+        from email.message import Message
+
+        msg = Message()
+        msg["Wheel-Version"] = "1.0"  # of the spec
+        msg["Generator"] = generator
+        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
+        if self.build_number is not None:
+            msg["Build"] = self.build_number
+
+        # Doesn't work for bdist_wininst
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        for impl in impl_tag.split("."):
+            for abi in abi_tag.split("."):
+                for plat in plat_tag.split("."):
+                    msg["Tag"] = "-".join((impl, abi, plat))
+
+        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
+        log.info(f"creating {wheelfile_path}")
+        with open(wheelfile_path, "wb") as f:
+            BytesGenerator(f, maxheaderlen=0).flatten(msg)
+
+    def _ensure_relative(self, path: str) -> str:
+        # copied from dir_util, deleted
+        drive, path = os.path.splitdrive(path)
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
+
+    @property
+    def license_paths(self) -> Iterable[str]:
+        if setuptools_major_version >= 57:
+            # Setuptools has resolved any patterns to actual file names
+            return self.distribution.metadata.license_files or ()
+
+        files = set[str]()
+        metadata = self.distribution.get_option_dict("metadata")
+        if setuptools_major_version >= 42:
+            # Setuptools recognizes the license_files option but does not do globbing
+            patterns = cast(Sequence[str], self.distribution.metadata.license_files)
+        else:
+            # Prior to those, wheel is entirely responsible for handling license files
+            if "license_files" in metadata:
+                patterns = metadata["license_files"][1].split()
+            else:
+                patterns = ()
+
+        if "license_file" in metadata:
+            warnings.warn(
+                'The "license_file" option is deprecated. Use "license_files" instead.',
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            files.add(metadata["license_file"][1])
+
+        if not files and not patterns and not isinstance(patterns, list):
+            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
+
+        for pattern in patterns:
+            for path in iglob(pattern):
+                if path.endswith("~"):
+                    log.debug(
+                        f'ignoring license file "{path}" as it looks like a backup'
+                    )
+                    continue
+
+                if path not in files and os.path.isfile(path):
+                    log.info(
+                        f'adding license file "{path}" (matched pattern "{pattern}")'
+                    )
+                    files.add(path)
+
+        return files
+
+    def egg2dist(self, egginfo_path: str, distinfo_path: str) -> None:
+        """Convert an .egg-info directory into a .dist-info directory"""
+
+        def adios(p: str) -> None:
+            """Appropriately delete directory, file or link."""
+            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
+                _shutil.rmtree(p)
+            elif os.path.exists(p):
+                os.unlink(p)
+
+        adios(distinfo_path)
+
+        if not os.path.exists(egginfo_path):
+            # There is no egg-info. This is probably because the egg-info
+            # file/directory name does not match the distribution name used
+            # to name the archive file. Check for this case and report
+            # accordingly.
+            import glob
+
+            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
+            possible = glob.glob(pat)
+            err = f"Egg metadata expected at {egginfo_path} but not found"
+            if possible:
+                alt = os.path.basename(possible[0])
+                err += f" ({alt} found - possible misnamed archive file?)"
+
+            raise ValueError(err)
+
+        # .egg-info is a directory
+        pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
+
+        # ignore common egg metadata that is useless to wheel
+        shutil.copytree(
+            egginfo_path,
+            distinfo_path,
+            ignore=lambda x, y: {
+                "PKG-INFO",
+                "requires.txt",
+                "SOURCES.txt",
+                "not-zip-safe",
+            },
+        )
+
+        # delete dependency_links if it is only whitespace
+        dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
+        with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
+            dependency_links = dependency_links_file.read().strip()
+        if not dependency_links:
+            adios(dependency_links_path)
+
+        metadata_path = os.path.join(distinfo_path, "METADATA")
+        shutil.copy(pkginfo_path, metadata_path)
+
+        licenses_folder_path = os.path.join(distinfo_path, "licenses")
+        for license_path in self.license_paths:
+            safe_path = _safe_license_file(license_path)
+            dist_info_license_path = os.path.join(licenses_folder_path, safe_path)
+            os.makedirs(os.path.dirname(dist_info_license_path), exist_ok=True)
+            shutil.copy(license_path, dist_info_license_path)
+
+        adios(egginfo_path)
+
+    def _zip_compression(self) -> int:
+        if (
+            isinstance(self.compression, int)
+            and self.compression in self.supported_compressions.values()
+        ):
+            return self.compression
+
+        compression = self.supported_compressions.get(str(self.compression))
+        if compression is not None:
+            return compression
+
+        raise ValueError(f"Unsupported compression: {self.compression!r}")
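For orientation, a sketch of how ``run()`` assembles the final wheel filename from ``wheel_dist_name`` and ``get_tag()``; the project name, version, and tag values are made-up examples:

name_and_version = "example_pkg-1.2.3"               # wheel_dist_name for a made-up project
impl_tag, abi_tag, plat_tag = "py3", "none", "any"   # get_tag() result for a pure wheel
print(f"{name_and_version}-{impl_tag}-{abi_tag}-{plat_tag}.whl")
# example_pkg-1.2.3-py3-none-any.whl

# A wheel containing C extensions built on CPython 3.12/Linux would instead carry
# a tag along the lines of ("cp312", "cp312", "linux_x86_64"), as derived from
# tags.interpreter_name()/get_abi_tag()/get_platform() above.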
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/build.py b/.venv/lib/python3.12/site-packages/setuptools/command/build.py
new file mode 100644
index 00000000..54cbb8d2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/build.py
@@ -0,0 +1,135 @@
+from __future__ import annotations
+
+from typing import Protocol
+
+from ..dist import Distribution
+
+from distutils.command.build import build as _build
+
+_ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"}
+
+
+class build(_build):
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
+    # copy to avoid sharing the object with parent class
+    sub_commands = _build.sub_commands[:]
+
+
+class SubCommand(Protocol):
+    """In order to support editable installations (see :pep:`660`) all
+    build subcommands **SHOULD** implement this protocol. They also **MUST** inherit
+    from ``setuptools.Command``.
+
+    When creating an :pep:`editable wheel <660>`, ``setuptools`` will try to evaluate
+    custom ``build`` subcommands using the following procedure:
+
+    1. ``setuptools`` will set the ``editable_mode`` attribute to ``True``
+    2. ``setuptools`` will execute the ``run()`` command.
+
+       .. important::
+          Subcommands **SHOULD** take advantage of ``editable_mode=True`` to adapt
+          their behaviour or perform optimisations.
+
+          For example, if a subcommand doesn't need to generate an extra file and
+          all it does is to copy a source file into the build directory,
+          ``run()`` **SHOULD** simply "early return".
+
+          Similarly, if the subcommand creates files that would be placed alongside
+          Python files in the final distribution, during an editable install
+          the command **SHOULD** generate these files "in place" (i.e. write them to
+          the original source directory, instead of using the build directory).
+          Note that ``get_output_mapping()`` should reflect that and include mappings
+          for "in place" builds accordingly.
+
+    3. ``setuptools`` uses any knowledge it can derive from the return values of
+       ``get_outputs()`` and ``get_output_mapping()`` to create an editable wheel.
+       When relevant, ``setuptools`` **MAY** attempt to use file links based on the value
+       of ``get_output_mapping()``. Alternatively, ``setuptools`` **MAY** attempt to use
+       :doc:`import hooks <python:reference/import>` to redirect any attempt to import
+       to the directory with the original source code and other files built in place.
+
+    Please note that custom sub-commands **SHOULD NOT** rely on ``run()`` being
+    executed (or not) to provide correct return values for ``get_outputs()``,
+    ``get_output_mapping()`` or ``get_source_files()``. The ``get_*`` methods should
+    work independently of ``run()``.
+    """
+
+    editable_mode: bool = False
+    """Boolean flag that will be set to ``True`` when setuptools is used for an
+    editable installation (see :pep:`660`).
+    Implementations **SHOULD** explicitly set the default value of this attribute to
+    ``False``.
+    When subcommands run, they can use this flag to perform optimizations or change
+    their behaviour accordingly.
+    """
+
+    build_lib: str
+    """String representing the directory where the build artifacts should be stored,
+    e.g. ``build/lib``.
+    For example, if a distribution wants to provide a Python module named ``pkg.mod``,
+    then a corresponding file should be written to ``{build_lib}/pkg/mod.py``.
+    A way of thinking about this is that the files saved under ``build_lib``
+    would be eventually copied to one of the directories in :obj:`site.PREFIXES`
+    upon installation.
+
+    A command that produces platform-independent files (e.g. compiling text templates
+    into Python functions) **CAN** initialize ``build_lib`` by copying its value from
+    the ``build_py`` command. On the other hand, a command that produces
+    platform-specific files **CAN** initialize ``build_lib`` by copying its value from
+    the ``build_ext`` command. In general this is done inside the ``finalize_options``
+    method with the help of the ``set_undefined_options`` command::
+
+        def finalize_options(self):
+            self.set_undefined_options("build_py", ("build_lib", "build_lib"))
+            ...
+    """
+
+    def initialize_options(self) -> None:
+        """(Required by the original :class:`setuptools.Command` interface)"""
+        ...
+
+    def finalize_options(self) -> None:
+        """(Required by the original :class:`setuptools.Command` interface)"""
+        ...
+
+    def run(self) -> None:
+        """(Required by the original :class:`setuptools.Command` interface)"""
+        ...
+
+    def get_source_files(self) -> list[str]:
+        """
+        Return a list of all files that are used by the command to create the expected
+        outputs.
+        For example, if your build command transpiles Java files into Python, you should
+        list here all the Java files.
+        The primary purpose of this function is to help populate the ``sdist``
+        with all the files necessary to build the distribution.
+        All files should be strings relative to the project root directory.
+        """
+        ...
+
+    def get_outputs(self) -> list[str]:
+        """
+        Return a list of files intended for distribution as they would have been
+        produced by the build.
+        These files should be strings in the form of
+        ``"{build_lib}/destination/file/path"``.
+
+        .. note::
+           The return value of ``get_outputs()`` should include all files used as keys
+           in ``get_output_mapping()`` plus files that are generated during the build
+           and don't correspond to any source file already present in the project.
+        """
+        ...
+
+    def get_output_mapping(self) -> dict[str, str]:
+        """
+        Return a mapping from destination files, as they would be produced by the
+        build (dict keys), to the respective existing (source) files (dict values).
+        Existing (source) files should be represented as strings relative to the project
+        root directory.
+        Destination files should be strings in the form of
+        ``"{build_lib}/destination/file/path"``.
+        """
+        ...
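For illustration only: a minimal sketch of a custom subcommand that follows the ``SubCommand`` protocol defined in this file and registers itself on the ``build`` command. The command name, file paths and ``setup()`` wiring below are hypothetical, not part of setuptools::

    from __future__ import annotations

    import os

    from setuptools import Command, setup
    from setuptools.command.build import build


    class build_templates(Command):  # hypothetical command name
        """Copy ``*.tmpl`` sources into the build tree as Python modules."""

        def initialize_options(self) -> None:
            self.build_lib = None
            self.editable_mode = False

        def finalize_options(self) -> None:
            # Platform-independent outputs: reuse build_py's ``build_lib``.
            self.set_undefined_options("build_py", ("build_lib", "build_lib"))

        def run(self) -> None:
            if self.editable_mode:
                # Nothing extra is generated; the editable wheel can point at
                # the original sources, so we "early return" as recommended.
                return
            for dest, src in self.get_output_mapping().items():
                self.mkpath(os.path.dirname(dest))
                self.copy_file(src, dest)

        def get_source_files(self) -> list[str]:
            return ["src/pkg/greeting.tmpl"]  # hypothetical source file

        def get_outputs(self) -> list[str]:
            return list(self.get_output_mapping())

        def get_output_mapping(self) -> dict[str, str]:
            return {f"{self.build_lib}/pkg/greeting.py": "src/pkg/greeting.tmpl"}


    # Register the subcommand so ``build`` runs it (predicate ``None`` = always run).
    build.sub_commands.append(("build_templates", None))
    setup(cmdclass={"build_templates": build_templates})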
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/build_clib.py b/.venv/lib/python3.12/site-packages/setuptools/command/build_clib.py
new file mode 100644
index 00000000..f376f4ce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/build_clib.py
@@ -0,0 +1,103 @@
+from ..dist import Distribution
+from ..modified import newer_pairwise_group
+
+import distutils.command.build_clib as orig
+from distutils import log
+from distutils.errors import DistutilsSetupError
+
+
+class build_clib(orig.build_clib):
+    """
+    Override the default build_clib behaviour to do the following:
+
+    1. Implement a rudimentary timestamp-based dependency system
+       so 'compile()' doesn't run every time.
+    2. Add more keys to the 'build_info' dictionary:
+        * obj_deps - specify dependencies for each object compiled.
+                     This should be a dictionary mapping each source
+                     filename to a list of its dependencies. Use an
+                     empty string as the key for global dependencies.
+        * cflags   - specify a list of additional flags to pass to
+                     the compiler.
+    """
+
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
+    def build_libraries(self, libraries) -> None:
+        for lib_name, build_info in libraries:
+            sources = build_info.get('sources')
+            if sources is None or not isinstance(sources, (list, tuple)):
+                raise DistutilsSetupError(
+                    f"in 'libraries' option (library '{lib_name}'), "
+                    "'sources' must be present and must be "
+                    "a list of source filenames"
+                )
+            sources = sorted(list(sources))
+
+            log.info("building '%s' library", lib_name)
+
+            # Make sure everything is the correct type.
+            # obj_deps should be a dictionary of keys as sources
+            # and a list/tuple of files that are its dependencies.
+            obj_deps = build_info.get('obj_deps', dict())
+            if not isinstance(obj_deps, dict):
+                raise DistutilsSetupError(
+                    f"in 'libraries' option (library '{lib_name}'), "
+                    "'obj_deps' must be a dictionary of "
+                    "type 'source: list'"
+                )
+            dependencies = []
+
+            # Get the global dependencies that are specified by the '' key.
+            # These will go into every source's dependency list.
+            global_deps = obj_deps.get('', list())
+            if not isinstance(global_deps, (list, tuple)):
+                raise DistutilsSetupError(
+                    f"in 'libraries' option (library '{lib_name}'), "
+                    "'obj_deps' must be a dictionary of "
+                    "type 'source: list'"
+                )
+
+            # Build the list to be used by newer_pairwise_group
+            # each source will be auto-added to its dependencies.
+            for source in sources:
+                src_deps = [source]
+                src_deps.extend(global_deps)
+                extra_deps = obj_deps.get(source, list())
+                if not isinstance(extra_deps, (list, tuple)):
+                    raise DistutilsSetupError(
+                        f"in 'libraries' option (library '{lib_name}'), "
+                        "'obj_deps' must be a dictionary of "
+                        "type 'source: list'"
+                    )
+                src_deps.extend(extra_deps)
+                dependencies.append(src_deps)
+
+            expected_objects = self.compiler.object_filenames(
+                sources,
+                output_dir=self.build_temp,
+            )
+
+            if newer_pairwise_group(dependencies, expected_objects) != ([], []):
+                # First, compile the source code to object files in the library
+                # directory.  (This should probably change to putting object
+                # files in a temporary build directory.)
+                macros = build_info.get('macros')
+                include_dirs = build_info.get('include_dirs')
+                cflags = build_info.get('cflags')
+                self.compiler.compile(
+                    sources,
+                    output_dir=self.build_temp,
+                    macros=macros,
+                    include_dirs=include_dirs,
+                    extra_postargs=cflags,
+                    debug=self.debug,
+                )
+
+            # Now "link" the object files together into a static library.
+            # (On Unix at least, this isn't really linking -- it just
+            # builds an archive.  Whatever.)
+            self.compiler.create_static_lib(
+                expected_objects, lib_name, output_dir=self.build_clib, debug=self.debug
+            )
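A hedged sketch of how the extra ``obj_deps`` and ``cflags`` keys described in the class docstring are supplied to this command; the library name, sources and header paths below are hypothetical::

    from setuptools import setup

    setup(
        name="example-clib",                  # hypothetical project name
        libraries=[
            (
                "foo",                        # static library to build
                {
                    "sources": ["src/foo.c", "src/bar.c"],
                    "include_dirs": ["include"],
                    "cflags": ["-O2"],        # extra flags passed to the compiler
                    "obj_deps": {
                        "": ["include/common.h"],        # global dependencies
                        "src/foo.c": ["include/foo.h"],  # per-source dependencies
                    },
                },
            )
        ],
    )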
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/build_ext.py b/.venv/lib/python3.12/site-packages/setuptools/command/build_ext.py
new file mode 100644
index 00000000..be833a37
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/build_ext.py
@@ -0,0 +1,469 @@
+from __future__ import annotations
+
+import itertools
+import os
+import sys
+from collections.abc import Iterator
+from importlib.machinery import EXTENSION_SUFFIXES
+from importlib.util import cache_from_source as _compiled_file_name
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from setuptools.dist import Distribution
+from setuptools.errors import BaseError
+from setuptools.extension import Extension, Library
+
+from distutils import log
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler, get_config_var
+
+if TYPE_CHECKING:
+    # Cython not installed on CI tests, causing _build_ext to be `Any`
+    from distutils.command.build_ext import build_ext as _build_ext
+else:
+    try:
+        # Attempt to use Cython for building extensions, if available
+        from Cython.Distutils.build_ext import build_ext as _build_ext
+
+        # Additionally, assert that the compiler module will load
+        # also. Ref #1229.
+        __import__('Cython.Compiler.Main')
+    except ImportError:
+        from distutils.command.build_ext import build_ext as _build_ext
+
+# make sure _config_vars is initialized
+get_config_var("LDSHARED")
+# Not publicly exposed in typeshed distutils stubs, but this is done on purpose
+# See https://github.com/pypa/setuptools/pull/4228#issuecomment-1959856400
+from distutils.sysconfig import _config_vars as _CONFIG_VARS  # noqa: E402
+
+
+def _customize_compiler_for_shlib(compiler):
+    if sys.platform == "darwin":
+        # building .dylib requires additional compiler flags on OSX; here we
+        # temporarily substitute the pyconfig.h variables so that distutils'
+        # 'customize_compiler' uses them before we build the shared libraries.
+        tmp = _CONFIG_VARS.copy()
+        try:
+            # XXX Help!  I don't have any idea whether these are right...
+            _CONFIG_VARS['LDSHARED'] = (
+                "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
+            )
+            _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
+            _CONFIG_VARS['SO'] = ".dylib"
+            customize_compiler(compiler)
+        finally:
+            _CONFIG_VARS.clear()
+            _CONFIG_VARS.update(tmp)
+    else:
+        customize_compiler(compiler)
+
+
+have_rtld = False
+use_stubs = False
+libtype = 'shared'
+
+if sys.platform == "darwin":
+    use_stubs = True
+elif os.name != 'nt':
+    try:
+        import dl  # type: ignore[import-not-found] # https://github.com/python/mypy/issues/13002
+
+        use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
+    except ImportError:
+        pass
+
+
+def if_dl(s):
+    return s if have_rtld else ''
+
+
+def get_abi3_suffix():
+    """Return the file extension for an abi3-compliant Extension()"""
+    for suffix in EXTENSION_SUFFIXES:
+        if '.abi3' in suffix:  # Unix
+            return suffix
+        elif suffix == '.pyd':  # Windows
+            return suffix
+    return None
+
+
+class build_ext(_build_ext):
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+    editable_mode = False
+    inplace = False
+
+    def run(self):
+        """Build extensions in build directory, then copy if --inplace"""
+        old_inplace, self.inplace = self.inplace, False
+        _build_ext.run(self)
+        self.inplace = old_inplace
+        if old_inplace:
+            self.copy_extensions_to_source()
+
+    def _get_inplace_equivalent(self, build_py, ext: Extension) -> tuple[str, str]:
+        fullname = self.get_ext_fullname(ext.name)
+        filename = self.get_ext_filename(fullname)
+        modpath = fullname.split('.')
+        package = '.'.join(modpath[:-1])
+        package_dir = build_py.get_package_dir(package)
+        inplace_file = os.path.join(package_dir, os.path.basename(filename))
+        regular_file = os.path.join(self.build_lib, filename)
+        return (inplace_file, regular_file)
+
+    def copy_extensions_to_source(self) -> None:
+        build_py = self.get_finalized_command('build_py')
+        for ext in self.extensions:
+            inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
+
+            # Always copy, even if source is older than destination, to ensure
+            # that the right extensions for the current Python/platform are
+            # used.
+            if os.path.exists(regular_file) or not ext.optional:
+                self.copy_file(regular_file, inplace_file, level=self.verbose)
+
+            if ext._needs_stub:
+                inplace_stub = self._get_equivalent_stub(ext, inplace_file)
+                self._write_stub_file(inplace_stub, ext, compile=True)
+                # Always compile stub and remove the original (leave the cache behind)
+                # (this behaviour was observed in previous iterations of the code)
+
+    def _get_equivalent_stub(self, ext: Extension, output_file: str) -> str:
+        dir_ = os.path.dirname(output_file)
+        _, _, name = ext.name.rpartition(".")
+        return f"{os.path.join(dir_, name)}.py"
+
+    def _get_output_mapping(self) -> Iterator[tuple[str, str]]:
+        if not self.inplace:
+            return
+
+        build_py = self.get_finalized_command('build_py')
+        opt = self.get_finalized_command('install_lib').optimize or ""
+
+        for ext in self.extensions:
+            inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
+            yield (regular_file, inplace_file)
+
+            if ext._needs_stub:
+                # This version of `build_ext` always builds artifacts in another dir;
+                # when "inplace=True" is given it just copies them back.
+                # This is done in the `copy_extensions_to_source` function, which
+                # always compiles stub files via `_compile_and_remove_stub`.
+                # At the end of the process, a `.pyc` stub file is created without the
+                # corresponding `.py`.
+
+                inplace_stub = self._get_equivalent_stub(ext, inplace_file)
+                regular_stub = self._get_equivalent_stub(ext, regular_file)
+                inplace_cache = _compiled_file_name(inplace_stub, optimization=opt)
+                output_cache = _compiled_file_name(regular_stub, optimization=opt)
+                yield (output_cache, inplace_cache)
+
+    def get_ext_filename(self, fullname: str) -> str:
+        so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
+        if so_ext:
+            filename = os.path.join(*fullname.split('.')) + so_ext
+        else:
+            filename = _build_ext.get_ext_filename(self, fullname)
+            ext_suffix = get_config_var('EXT_SUFFIX')
+            if not isinstance(ext_suffix, str):
+                raise OSError(
+                    "Configuration variable EXT_SUFFIX not found for this platform "
+                    "and environment variable SETUPTOOLS_EXT_SUFFIX is missing"
+                )
+            so_ext = ext_suffix
+
+        if fullname in self.ext_map:
+            ext = self.ext_map[fullname]
+            abi3_suffix = get_abi3_suffix()
+            if ext.py_limited_api and abi3_suffix:  # Use abi3
+                filename = filename[: -len(so_ext)] + abi3_suffix
+            if isinstance(ext, Library):
+                fn, ext = os.path.splitext(filename)
+                return self.shlib_compiler.library_filename(fn, libtype)
+            elif use_stubs and ext._links_to_dynamic:
+                d, fn = os.path.split(filename)
+                return os.path.join(d, 'dl-' + fn)
+        return filename
+
+    def initialize_options(self):
+        _build_ext.initialize_options(self)
+        self.shlib_compiler = None
+        self.shlibs = []
+        self.ext_map = {}
+        self.editable_mode = False
+
+    def finalize_options(self) -> None:
+        _build_ext.finalize_options(self)
+        self.extensions = self.extensions or []
+        self.check_extensions_list(self.extensions)
+        self.shlibs = [ext for ext in self.extensions if isinstance(ext, Library)]
+        if self.shlibs:
+            self.setup_shlib_compiler()
+        for ext in self.extensions:
+            ext._full_name = self.get_ext_fullname(ext.name)
+        for ext in self.extensions:
+            fullname = ext._full_name
+            self.ext_map[fullname] = ext
+
+            # distutils 3.1 will also ask for module names
+            # XXX what to do with conflicts?
+            self.ext_map[fullname.split('.')[-1]] = ext
+
+            ltd = self.shlibs and self.links_to_dynamic(ext) or False
+            ns = ltd and use_stubs and not isinstance(ext, Library)
+            ext._links_to_dynamic = ltd
+            ext._needs_stub = ns
+            filename = ext._file_name = self.get_ext_filename(fullname)
+            libdir = os.path.dirname(os.path.join(self.build_lib, filename))
+            if ltd and libdir not in ext.library_dirs:
+                ext.library_dirs.append(libdir)
+            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
+                ext.runtime_library_dirs.append(os.curdir)
+
+        if self.editable_mode:
+            self.inplace = True
+
+    def setup_shlib_compiler(self):
+        compiler = self.shlib_compiler = new_compiler(
+            compiler=self.compiler, dry_run=self.dry_run, force=self.force
+        )
+        _customize_compiler_for_shlib(compiler)
+
+        if self.include_dirs is not None:
+            compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for name, value in self.define:
+                compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                compiler.undefine_macro(macro)
+        if self.libraries is not None:
+            compiler.set_libraries(self.libraries)
+        if self.library_dirs is not None:
+            compiler.set_library_dirs(self.library_dirs)
+        if self.rpath is not None:
+            compiler.set_runtime_library_dirs(self.rpath)
+        if self.link_objects is not None:
+            compiler.set_link_objects(self.link_objects)
+
+        # hack so distutils' build_extension() builds a library instead
+        compiler.link_shared_object = link_shared_object.__get__(compiler)  # type: ignore[method-assign]
+
+    def get_export_symbols(self, ext):
+        if isinstance(ext, Library):
+            return ext.export_symbols
+        return _build_ext.get_export_symbols(self, ext)
+
+    def build_extension(self, ext) -> None:
+        ext._convert_pyx_sources_to_lang()
+        _compiler = self.compiler
+        try:
+            if isinstance(ext, Library):
+                self.compiler = self.shlib_compiler
+            _build_ext.build_extension(self, ext)
+            if ext._needs_stub:
+                build_lib = self.get_finalized_command('build_py').build_lib
+                self.write_stub(build_lib, ext)
+        finally:
+            self.compiler = _compiler
+
+    def links_to_dynamic(self, ext):
+        """Return true if 'ext' links to a dynamic lib in the same package"""
+        # XXX this should check to ensure the lib is actually being built
+        # XXX as dynamic, and not just using a locally-found version or a
+        # XXX static-compiled version
+        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
+        pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
+        return any(pkg + libname in libnames for libname in ext.libraries)
+
+    def get_source_files(self) -> list[str]:
+        return [*_build_ext.get_source_files(self), *self._get_internal_depends()]
+
+    def _get_internal_depends(self) -> Iterator[str]:
+        """Yield ``ext.depends`` that are contained by the project directory"""
+        project_root = Path(self.distribution.src_root or os.curdir).resolve()
+        depends = (dep for ext in self.extensions for dep in ext.depends)
+
+        def skip(orig_path: str, reason: str) -> None:
+            log.info(
+                "dependency %s won't be automatically "
+                "included in the manifest: the path %s",
+                orig_path,
+                reason,
+            )
+
+        for dep in depends:
+            path = Path(dep)
+
+            if path.is_absolute():
+                skip(dep, "must be relative")
+                continue
+
+            if ".." in path.parts:
+                skip(dep, "can't have `..` segments")
+                continue
+
+            try:
+                resolved = (project_root / path).resolve(strict=True)
+            except OSError:
+                skip(dep, "doesn't exist")
+                continue
+
+            try:
+                resolved.relative_to(project_root)
+            except ValueError:
+                skip(dep, "must be inside the project root")
+                continue
+
+            yield path.as_posix()
+
+    def get_outputs(self) -> list[str]:
+        if self.inplace:
+            return list(self.get_output_mapping().keys())
+        return sorted(_build_ext.get_outputs(self) + self.__get_stubs_outputs())
+
+    def get_output_mapping(self) -> dict[str, str]:
+        """See :class:`setuptools.commands.build.SubCommand`"""
+        mapping = self._get_output_mapping()
+        return dict(sorted(mapping, key=lambda x: x[0]))
+
+    def __get_stubs_outputs(self):
+        # assemble the base name for each extension that needs a stub
+        ns_ext_bases = (
+            os.path.join(self.build_lib, *ext._full_name.split('.'))
+            for ext in self.extensions
+            if ext._needs_stub
+        )
+        # pair each base with the extension
+        pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
+        return list(base + fnext for base, fnext in pairs)
+
+    def __get_output_extensions(self):
+        yield '.py'
+        yield '.pyc'
+        if self.get_finalized_command('build_py').optimize:
+            yield '.pyo'
+
+    def write_stub(self, output_dir, ext, compile=False) -> None:
+        stub_file = os.path.join(output_dir, *ext._full_name.split('.')) + '.py'
+        self._write_stub_file(stub_file, ext, compile)
+
+    def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
+        log.info("writing stub loader for %s to %s", ext._full_name, stub_file)
+        if compile and os.path.exists(stub_file):
+            raise BaseError(stub_file + " already exists! Please delete.")
+        if not self.dry_run:
+            with open(stub_file, 'w', encoding="utf-8") as f:
+                content = '\n'.join([
+                    "def __bootstrap__():",
+                    "   global __bootstrap__, __file__, __loader__",
+                    "   import sys, os, pkg_resources, importlib.util" + if_dl(", dl"),
+                    "   __file__ = pkg_resources.resource_filename"
+                    f"(__name__,{os.path.basename(ext._file_name)!r})",
+                    "   del __bootstrap__",
+                    "   if '__loader__' in globals():",
+                    "       del __loader__",
+                    if_dl("   old_flags = sys.getdlopenflags()"),
+                    "   old_dir = os.getcwd()",
+                    "   try:",
+                    "     os.chdir(os.path.dirname(__file__))",
+                    if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
+                    "     spec = importlib.util.spec_from_file_location(",
+                    "                __name__, __file__)",
+                    "     mod = importlib.util.module_from_spec(spec)",
+                    "     spec.loader.exec_module(mod)",
+                    "   finally:",
+                    if_dl("     sys.setdlopenflags(old_flags)"),
+                    "     os.chdir(old_dir)",
+                    "__bootstrap__()",
+                    "",  # terminal \n
+                ])
+                f.write(content)
+        if compile:
+            self._compile_and_remove_stub(stub_file)
+
+    def _compile_and_remove_stub(self, stub_file: str):
+        from distutils.util import byte_compile
+
+        byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run)
+        optimize = self.get_finalized_command('install_lib').optimize
+        if optimize > 0:
+            byte_compile(
+                [stub_file],
+                optimize=optimize,
+                force=True,
+                dry_run=self.dry_run,
+            )
+        if os.path.exists(stub_file) and not self.dry_run:
+            os.unlink(stub_file)
+
+
+if use_stubs or os.name == 'nt':
+    # Build shared libraries
+    #
+    def link_shared_object(
+        self,
+        objects,
+        output_libname,
+        output_dir=None,
+        libraries=None,
+        library_dirs=None,
+        runtime_library_dirs=None,
+        export_symbols=None,
+        debug: bool = False,
+        extra_preargs=None,
+        extra_postargs=None,
+        build_temp=None,
+        target_lang=None,
+    ) -> None:
+        self.link(
+            self.SHARED_LIBRARY,
+            objects,
+            output_libname,
+            output_dir,
+            libraries,
+            library_dirs,
+            runtime_library_dirs,
+            export_symbols,
+            debug,
+            extra_preargs,
+            extra_postargs,
+            build_temp,
+            target_lang,
+        )
+
+else:
+    # Build static libraries everywhere else
+    libtype = 'static'
+
+    def link_shared_object(
+        self,
+        objects,
+        output_libname,
+        output_dir=None,
+        libraries=None,
+        library_dirs=None,
+        runtime_library_dirs=None,
+        export_symbols=None,
+        debug: bool = False,
+        extra_preargs=None,
+        extra_postargs=None,
+        build_temp=None,
+        target_lang=None,
+    ) -> None:
+        # XXX we need to either disallow these attrs on Library instances,
+        # or warn/abort here if set, or something...
+        # libraries=None, library_dirs=None, runtime_library_dirs=None,
+        # export_symbols=None, extra_preargs=None, extra_postargs=None,
+        # build_temp=None
+
+        assert output_dir is None  # distutils build_ext doesn't pass this
+        output_dir, filename = os.path.split(output_libname)
+        basename, _ext = os.path.splitext(filename)
+        if self.library_filename("x").startswith('lib'):
+            # strip 'lib' prefix; this is kludgy if some platform uses
+            # a different prefix
+            basename = basename[3:]
+
+        self.create_static_lib(objects, basename, output_dir, debug, target_lang)
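To illustrate the abi3 handling in ``get_ext_filename`` above, a hypothetical ``setup()`` fragment that opts an extension into the limited API; the module name, source path and pinned API version are assumptions. With ``py_limited_api=True`` and an abi3 suffix available, the platform-specific suffix is swapped for the abi3 one::

    from setuptools import Extension, setup

    setup(
        name="example-ext",                           # hypothetical project name
        ext_modules=[
            Extension(
                "pkg._speedups",                      # hypothetical module
                sources=["src/_speedups.c"],
                py_limited_api=True,                  # request a stable-ABI build
                define_macros=[("Py_LIMITED_API", "0x03080000")],
            )
        ],
    )

    # The SETUPTOOLS_EXT_SUFFIX environment variable (also honoured above) can
    # override the extension suffix entirely, for example when cross-compiling.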
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/build_py.py b/.venv/lib/python3.12/site-packages/setuptools/command/build_py.py
new file mode 100644
index 00000000..2f6fcb7c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/build_py.py
@@ -0,0 +1,400 @@
+from __future__ import annotations
+
+import fnmatch
+import itertools
+import os
+import stat
+import textwrap
+from collections.abc import Iterable, Iterator
+from functools import partial
+from glob import glob
+from pathlib import Path
+
+from more_itertools import unique_everseen
+
+from .._path import StrPath, StrPathT
+from ..dist import Distribution
+from ..warnings import SetuptoolsDeprecationWarning
+
+import distutils.command.build_py as orig
+import distutils.errors
+from distutils.util import convert_path
+
+_IMPLICIT_DATA_FILES = ('*.pyi', 'py.typed')
+
+
+def make_writable(target) -> None:
+    os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE)
+
+
+class build_py(orig.build_py):
+    """Enhanced 'build_py' command that includes data files with packages
+
+    The data files are specified via a 'package_data' argument to 'setup()'.
+    See 'setuptools.dist.Distribution' for more details.
+
+    Also, this version of the 'build_py' command allows you to specify both
+    'py_modules' and 'packages' in the same setup operation.
+    """
+
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+    editable_mode: bool = False
+    existing_egg_info_dir: StrPath | None = None  #: Private API, internal use only.
+
+    def finalize_options(self):
+        orig.build_py.finalize_options(self)
+        self.package_data = self.distribution.package_data
+        self.exclude_package_data = self.distribution.exclude_package_data or {}
+        if 'data_files' in self.__dict__:
+            del self.__dict__['data_files']
+
+    def copy_file(  # type: ignore[override] # No overload, no bytes support
+        self,
+        infile: StrPath,
+        outfile: StrPathT,
+        preserve_mode: bool = True,
+        preserve_times: bool = True,
+        link: str | None = None,
+        level: object = 1,
+    ) -> tuple[StrPathT | str, bool]:
+        # Overwrite base class to allow using links
+        if link:
+            infile = str(Path(infile).resolve())
+            outfile = str(Path(outfile).resolve())  # type: ignore[assignment] # Re-assigning a str when outfile is StrPath is ok
+        return super().copy_file(  # pyright: ignore[reportReturnType] # pypa/distutils#309
+            infile, outfile, preserve_mode, preserve_times, link, level
+        )
+
+    def run(self) -> None:
+        """Build modules, packages, and copy data files to build directory"""
+        if not (self.py_modules or self.packages) or self.editable_mode:
+            return
+
+        if self.py_modules:
+            self.build_modules()
+
+        if self.packages:
+            self.build_packages()
+            self.build_package_data()
+
+        # Only compile actual .py files, using our base class' idea of what our
+        # output files are.
+        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=False))
+
+    def __getattr__(self, attr: str):
+        "lazily compute data files"
+        if attr == 'data_files':
+            self.data_files = self._get_data_files()
+            return self.data_files
+        return orig.build_py.__getattr__(self, attr)
+
+    def _get_data_files(self):
+        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+        self.analyze_manifest()
+        return list(map(self._get_pkg_data_files, self.packages or ()))
+
+    def get_data_files_without_manifest(self):
+        """
+        Generate list of ``(package,src_dir,build_dir,filenames)`` tuples,
+        but without triggering any attempt to analyze or build the manifest.
+        """
+        # Prevent eventual errors from unset `manifest_files`
+        # (that would otherwise be set by `analyze_manifest`)
+        self.__dict__.setdefault('manifest_files', {})
+        return list(map(self._get_pkg_data_files, self.packages or ()))
+
+    def _get_pkg_data_files(self, package):
+        # Locate package source directory
+        src_dir = self.get_package_dir(package)
+
+        # Compute package build directory
+        build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+        # Strip directory from globbed filenames
+        filenames = [
+            os.path.relpath(file, src_dir)
+            for file in self.find_data_files(package, src_dir)
+        ]
+        return package, src_dir, build_dir, filenames
+
+    def find_data_files(self, package, src_dir):
+        """Return filenames for package's data files in 'src_dir'"""
+        patterns = self._get_platform_patterns(
+            self.package_data,
+            package,
+            src_dir,
+            extra_patterns=_IMPLICIT_DATA_FILES,
+        )
+        globs_expanded = map(partial(glob, recursive=True), patterns)
+        # flatten the expanded globs into an iterable of matches
+        globs_matches = itertools.chain.from_iterable(globs_expanded)
+        glob_files = filter(os.path.isfile, globs_matches)
+        files = itertools.chain(
+            self.manifest_files.get(package, []),
+            glob_files,
+        )
+        return self.exclude_data_files(package, src_dir, files)
+
+    def get_outputs(self, include_bytecode: bool = True) -> list[str]:  # type: ignore[override] # Using a real boolean instead of 0|1
+        """See :class:`setuptools.commands.build.SubCommand`"""
+        if self.editable_mode:
+            return list(self.get_output_mapping().keys())
+        return super().get_outputs(include_bytecode)
+
+    def get_output_mapping(self) -> dict[str, str]:
+        """See :class:`setuptools.commands.build.SubCommand`"""
+        mapping = itertools.chain(
+            self._get_package_data_output_mapping(),
+            self._get_module_mapping(),
+        )
+        return dict(sorted(mapping, key=lambda x: x[0]))
+
+    def _get_module_mapping(self) -> Iterator[tuple[str, str]]:
+        """Iterate over all modules producing (dest, src) pairs."""
+        for package, module, module_file in self.find_all_modules():
+            package = package.split('.')
+            filename = self.get_module_outfile(self.build_lib, package, module)
+            yield (filename, module_file)
+
+    def _get_package_data_output_mapping(self) -> Iterator[tuple[str, str]]:
+        """Iterate over package data producing (dest, src) pairs."""
+        for package, src_dir, build_dir, filenames in self.data_files:
+            for filename in filenames:
+                target = os.path.join(build_dir, filename)
+                srcfile = os.path.join(src_dir, filename)
+                yield (target, srcfile)
+
+    def build_package_data(self) -> None:
+        """Copy data files into build directory"""
+        for target, srcfile in self._get_package_data_output_mapping():
+            self.mkpath(os.path.dirname(target))
+            _outf, _copied = self.copy_file(srcfile, target)
+            make_writable(target)
+
+    def analyze_manifest(self) -> None:
+        self.manifest_files: dict[str, list[str]] = {}
+        if not self.distribution.include_package_data:
+            return
+        src_dirs: dict[str, str] = {}
+        for package in self.packages or ():
+            # Locate package source directory
+            src_dirs[assert_relative(self.get_package_dir(package))] = package
+
+        if (
+            self.existing_egg_info_dir
+            and Path(self.existing_egg_info_dir, "SOURCES.txt").exists()
+        ):
+            egg_info_dir = self.existing_egg_info_dir
+            manifest = Path(egg_info_dir, "SOURCES.txt")
+            files = manifest.read_text(encoding="utf-8").splitlines()
+        else:
+            self.run_command('egg_info')
+            ei_cmd = self.get_finalized_command('egg_info')
+            egg_info_dir = ei_cmd.egg_info
+            files = ei_cmd.filelist.files
+
+        check = _IncludePackageDataAbuse()
+        for path in self._filter_build_files(files, egg_info_dir):
+            d, f = os.path.split(assert_relative(path))
+            prev = None
+            oldf = f
+            while d and d != prev and d not in src_dirs:
+                prev = d
+                d, df = os.path.split(d)
+                f = os.path.join(df, f)
+            if d in src_dirs:
+                if f == oldf:
+                    if check.is_module(f):
+                        continue  # it's a module, not data
+                else:
+                    importable = check.importable_subpackage(src_dirs[d], f)
+                    if importable:
+                        check.warn(importable)
+                self.manifest_files.setdefault(src_dirs[d], []).append(path)
+
+    def _filter_build_files(
+        self, files: Iterable[str], egg_info: StrPath
+    ) -> Iterator[str]:
+        """
+        ``build_meta`` may try to create egg_info outside of the project directory,
+        and this can be problematic for certain plugins (reported in issue #3500).
+
+        Extensions might also include, among their sources, files created in the
+        ``build_lib`` and ``build_temp`` directories.
+
+        This function filters these invalid files out.
+        """
+        build = self.get_finalized_command("build")
+        build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base)
+        norm_dirs = [os.path.normpath(p) for p in build_dirs if p]
+
+        for file in files:
+            norm_path = os.path.normpath(file)
+            if not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs):
+                yield file
+
+    def get_data_files(self) -> None:
+        pass  # Lazily compute data files in _get_data_files() function.
+
+    def check_package(self, package, package_dir):
+        """Check namespace packages' __init__ for declare_namespace"""
+        try:
+            return self.packages_checked[package]
+        except KeyError:
+            pass
+
+        init_py = orig.build_py.check_package(self, package, package_dir)
+        self.packages_checked[package] = init_py
+
+        if not init_py or not self.distribution.namespace_packages:
+            return init_py
+
+        for pkg in self.distribution.namespace_packages:
+            if pkg == package or pkg.startswith(package + '.'):
+                break
+        else:
+            return init_py
+
+        with open(init_py, 'rb') as f:
+            contents = f.read()
+        if b'declare_namespace' not in contents:
+            raise distutils.errors.DistutilsError(
+                f"Namespace package problem: {package} is a namespace package, but "
+                "its\n__init__.py does not call declare_namespace()! Please "
+                'fix it.\n(See the setuptools manual under '
+                '"Namespace Packages" for details.)\n"'
+            )
+        return init_py
+
+    def initialize_options(self):
+        self.packages_checked = {}
+        orig.build_py.initialize_options(self)
+        self.editable_mode = False
+        self.existing_egg_info_dir = None
+
+    def get_package_dir(self, package):
+        res = orig.build_py.get_package_dir(self, package)
+        if self.distribution.src_root is not None:
+            return os.path.join(self.distribution.src_root, res)
+        return res
+
+    def exclude_data_files(self, package, src_dir, files):
+        """Filter filenames for package's data files in 'src_dir'"""
+        files = list(files)
+        patterns = self._get_platform_patterns(
+            self.exclude_package_data,
+            package,
+            src_dir,
+        )
+        match_groups = (fnmatch.filter(files, pattern) for pattern in patterns)
+        # flatten the groups of matches into an iterable of matches
+        matches = itertools.chain.from_iterable(match_groups)
+        bad = set(matches)
+        keepers = (fn for fn in files if fn not in bad)
+        # ditch dupes
+        return list(unique_everseen(keepers))
+
+    @staticmethod
+    def _get_platform_patterns(spec, package, src_dir, extra_patterns=()):
+        """
+        yield platform-specific path patterns (suitable for glob
+        or fn_match) from a glob-based spec (such as
+        self.package_data or self.exclude_package_data)
+        matching package in src_dir.
+        """
+        raw_patterns = itertools.chain(
+            extra_patterns,
+            spec.get('', []),
+            spec.get(package, []),
+        )
+        return (
+            # Each pattern has to be converted to a platform-specific path
+            os.path.join(src_dir, convert_path(pattern))
+            for pattern in raw_patterns
+        )
+
+
+def assert_relative(path):
+    if not os.path.isabs(path):
+        return path
+    from distutils.errors import DistutilsSetupError
+
+    msg = (
+        textwrap.dedent(
+            """
+        Error: setup script specifies an absolute path:
+
+            %s
+
+        setup() arguments must *always* be /-separated paths relative to the
+        setup.py directory, *never* absolute paths.
+        """
+        ).lstrip()
+        % path
+    )
+    raise DistutilsSetupError(msg)
+
+
+class _IncludePackageDataAbuse:
+    """Inform users that package or module is included as 'data file'"""
+
+    class _Warning(SetuptoolsDeprecationWarning):
+        _SUMMARY = """
+        Package {importable!r} is absent from the `packages` configuration.
+        """
+
+        _DETAILS = """
+        ############################
+        # Package would be ignored #
+        ############################
+        Python recognizes {importable!r} as an importable package[^1],
+        but it is absent from setuptools' `packages` configuration.
+
+        This leads to an ambiguous overall configuration. If you want to distribute this
+        package, please make sure that {importable!r} is explicitly added
+        to the `packages` configuration field.
+
+        Alternatively, you can also rely on setuptools' discovery methods
+        (for example by using `find_namespace_packages(...)`/`find_namespace:`
+        instead of `find_packages(...)`/`find:`).
+
+        You can read more about "package discovery" on setuptools documentation page:
+
+        - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html
+
+        If you don't want {importable!r} to be distributed and are
+        already explicitly excluding {importable!r} via
+        `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`,
+        you can try to use `exclude_package_data`, or `include-package-data=False` in
+        combination with a more fine grained `package-data` configuration.
+
+        You can read more about "package data files" on setuptools documentation page:
+
+        - https://setuptools.pypa.io/en/latest/userguide/datafiles.html
+
+
+        [^1]: For Python, any directory (with suitable naming) can be imported,
+              even if it does not contain any `.py` files.
+              On the other hand, currently there is no concept of a package data
+              directory; all directories are treated like packages.
+        """
+        # _DUE_DATE: still not defined as this is particularly controversial.
+        # Warning initially introduced in May 2022. See issue #3340 for discussion.
+
+    def __init__(self):
+        self._already_warned = set()
+
+    def is_module(self, file):
+        return file.endswith(".py") and file[: -len(".py")].isidentifier()
+
+    def importable_subpackage(self, parent, file):
+        pkg = Path(file).parent
+        parts = list(itertools.takewhile(str.isidentifier, pkg.parts))
+        if parts:
+            return ".".join([parent, *parts])
+        return None
+
+    def warn(self, importable):
+        if importable not in self._already_warned:
+            self._Warning.emit(importable=importable)
+            self._already_warned.add(importable)
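For reference, a hypothetical configuration that exercises the ``package_data`` / ``exclude_package_data`` handling implemented in this command; the project layout and glob patterns are assumptions, not taken from setuptools itself::

    from setuptools import find_packages, setup

    setup(
        name="example-data",                           # hypothetical project name
        packages=find_packages(where="src"),
        package_dir={"": "src"},
        include_package_data=True,                     # also pull files listed in SOURCES.txt
        package_data={
            "": ["*.json"],                            # the '' key applies to every package
            "pkg.assets": ["templates/*.html"],        # hypothetical package
        },
        exclude_package_data={"": ["*.secret.json"]},  # filtered out by exclude_data_files()
    )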
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/develop.py b/.venv/lib/python3.12/site-packages/setuptools/command/develop.py
new file mode 100644
index 00000000..7eee29d4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/develop.py
@@ -0,0 +1,195 @@
+import glob
+import os
+
+import setuptools
+from setuptools import _normalization, _path, namespaces
+from setuptools.command.easy_install import easy_install
+
+from ..unicode_utils import _read_utf8_with_fallback
+
+from distutils import log
+from distutils.errors import DistutilsOptionError
+from distutils.util import convert_path
+
+
+class develop(namespaces.DevelopInstaller, easy_install):
+    """Set up package for development"""
+
+    description = "install package in 'development mode'"
+
+    user_options = easy_install.user_options + [
+        ("uninstall", "u", "Uninstall this source package"),
+        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
+    ]
+
+    boolean_options = easy_install.boolean_options + ['uninstall']
+
+    command_consumes_arguments = False  # override base
+
+    def run(self):
+        if self.uninstall:
+            self.multi_version = True
+            self.uninstall_link()
+            self.uninstall_namespaces()
+        else:
+            self.install_for_development()
+        self.warn_deprecated_options()
+
+    def initialize_options(self):
+        self.uninstall = None
+        self.egg_path = None
+        easy_install.initialize_options(self)
+        self.setup_path = None
+        self.always_copy_from = '.'  # always copy eggs installed in curdir
+
+    def finalize_options(self) -> None:
+        import pkg_resources
+
+        ei = self.get_finalized_command("egg_info")
+        self.args = [ei.egg_name]
+
+        easy_install.finalize_options(self)
+        self.expand_basedirs()
+        self.expand_dirs()
+        # pick up setup-dir .egg files only: no .egg-info
+        self.package_index.scan(glob.glob('*.egg'))
+
+        egg_link_fn = (
+            _normalization.filename_component_broken(ei.egg_name) + '.egg-link'
+        )
+        self.egg_link = os.path.join(self.install_dir, egg_link_fn)
+        self.egg_base = ei.egg_base
+        if self.egg_path is None:
+            self.egg_path = os.path.abspath(ei.egg_base)
+
+        target = _path.normpath(self.egg_base)
+        egg_path = _path.normpath(os.path.join(self.install_dir, self.egg_path))
+        if egg_path != target:
+            raise DistutilsOptionError(
+                "--egg-path must be a relative path from the install"
+                " directory to " + target
+            )
+
+        # Make a distribution for the package's source
+        self.dist = pkg_resources.Distribution(
+            target,
+            pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
+            project_name=ei.egg_name,
+        )
+
+        self.setup_path = self._resolve_setup_path(
+            self.egg_base,
+            self.install_dir,
+            self.egg_path,
+        )
+
+    @staticmethod
+    def _resolve_setup_path(egg_base, install_dir, egg_path):
+        """
+        Generate a path from egg_base back to '.' where the
+        setup script resides and ensure that path points to the
+        setup path from $install_dir/$egg_path.
+        """
+        path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
+        if path_to_setup != os.curdir:
+            path_to_setup = '../' * (path_to_setup.count('/') + 1)
+        resolved = _path.normpath(os.path.join(install_dir, egg_path, path_to_setup))
+        curdir = _path.normpath(os.curdir)
+        if resolved != curdir:
+            raise DistutilsOptionError(
+                "Can't get a consistent path to setup script from"
+                " installation directory",
+                resolved,
+                curdir,
+            )
+        return path_to_setup
+
+    def install_for_development(self) -> None:
+        self.run_command('egg_info')
+
+        # Build extensions in-place
+        self.reinitialize_command('build_ext', inplace=True)
+        self.run_command('build_ext')
+
+        if setuptools.bootstrap_install_from:
+            self.easy_install(setuptools.bootstrap_install_from)
+            setuptools.bootstrap_install_from = None
+
+        self.install_namespaces()
+
+        # create an .egg-link in the installation dir, pointing to our egg
+        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
+        if not self.dry_run:
+            with open(self.egg_link, "w", encoding="utf-8") as f:
+                f.write(self.egg_path + "\n" + self.setup_path)
+        # postprocess the installed distro, fixing up .pth, installing scripts,
+        # and handling requirements
+        self.process_distribution(None, self.dist, not self.no_deps)
+
+    def uninstall_link(self) -> None:
+        if os.path.exists(self.egg_link):
+            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
+
+            contents = [
+                line.rstrip()
+                for line in _read_utf8_with_fallback(self.egg_link).splitlines()
+            ]
+
+            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
+                log.warn("Link points to %s: uninstall aborted", contents)
+                return
+            if not self.dry_run:
+                os.unlink(self.egg_link)
+        if not self.dry_run:
+            self.update_pth(self.dist)  # remove any .pth link to us
+        if self.distribution.scripts:
+            # XXX should also check for entry point scripts!
+            log.warn("Note: you must uninstall or replace scripts manually!")
+
+    def install_egg_scripts(self, dist):
+        if dist is not self.dist:
+            # Installing a dependency, so fall back to normal behavior
+            return easy_install.install_egg_scripts(self, dist)
+
+        # create wrapper scripts in the script dir, pointing to dist.scripts
+
+        # new-style...
+        self.install_wrapper_scripts(dist)
+
+        # ...and old-style
+        for script_name in self.distribution.scripts or []:
+            script_path = os.path.abspath(convert_path(script_name))
+            script_name = os.path.basename(script_path)
+            script_text = _read_utf8_with_fallback(script_path)
+            self.install_script(dist, script_name, script_text, script_path)
+
+        return None
+
+    def install_wrapper_scripts(self, dist):
+        dist = VersionlessRequirement(dist)
+        return easy_install.install_wrapper_scripts(self, dist)
+
+
+class VersionlessRequirement:
+    """
+    Adapt a pkg_resources.Distribution to simply return the project
+    name as the 'requirement' so that scripts will work across
+    multiple versions.
+
+    >>> from pkg_resources import Distribution
+    >>> dist = Distribution(project_name='foo', version='1.0')
+    >>> str(dist.as_requirement())
+    'foo==1.0'
+    >>> adapted_dist = VersionlessRequirement(dist)
+    >>> str(adapted_dist.as_requirement())
+    'foo'
+    """
+
+    def __init__(self, dist) -> None:
+        self.__dist = dist
+
+    def __getattr__(self, name: str):
+        return getattr(self.__dist, name)
+
+    def as_requirement(self):
+        return self.project_name
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/dist_info.py b/.venv/lib/python3.12/site-packages/setuptools/command/dist_info.py
new file mode 100644
index 00000000..dca01ff0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/dist_info.py
@@ -0,0 +1,103 @@
+"""
+Create a dist_info directory, as defined in the wheel specification.
+"""
+
+import os
+import shutil
+from contextlib import contextmanager
+from pathlib import Path
+from typing import cast
+
+from .. import _normalization
+from .._shutil import rmdir as _rm
+from .egg_info import egg_info as egg_info_cls
+
+from distutils import log
+from distutils.core import Command
+
+
+class dist_info(Command):
+    """
+    This command is private and reserved for internal use of setuptools;
+    users should rely on ``setuptools.build_meta`` APIs instead.
+    """
+
+    description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create .dist-info directory"
+
+    user_options = [
+        (
+            'output-dir=',
+            'o',
+            "directory inside of which the .dist-info will be"
+            "created [default: top of the source tree]",
+        ),
+        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+        ('no-date', 'D', "Don't include date stamp [default]"),
+        ('keep-egg-info', None, "*TRANSITIONAL* will be removed in the future"),
+    ]
+
+    boolean_options = ['tag-date', 'keep-egg-info']
+    negative_opt = {'no-date': 'tag-date'}
+
+    def initialize_options(self):
+        self.output_dir = None
+        self.name = None
+        self.dist_info_dir = None
+        self.tag_date = None
+        self.tag_build = None
+        self.keep_egg_info = False
+
+    def finalize_options(self) -> None:
+        dist = self.distribution
+        project_dir = dist.src_root or os.curdir
+        self.output_dir = Path(self.output_dir or project_dir)
+
+        egg_info = cast(egg_info_cls, self.reinitialize_command("egg_info"))
+        egg_info.egg_base = str(self.output_dir)
+
+        if self.tag_date:
+            egg_info.tag_date = self.tag_date
+        else:
+            self.tag_date = egg_info.tag_date
+
+        if self.tag_build:
+            egg_info.tag_build = self.tag_build
+        else:
+            self.tag_build = egg_info.tag_build
+
+        egg_info.finalize_options()
+        self.egg_info = egg_info
+
+        name = _normalization.safer_name(dist.get_name())
+        version = _normalization.safer_best_effort_version(dist.get_version())
+        self.name = f"{name}-{version}"
+        self.dist_info_dir = os.path.join(self.output_dir, f"{self.name}.dist-info")
+
+    @contextmanager
+    def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool):
+        if requires_bkp:
+            bkp_name = f"{dir_path}.__bkp__"
+            _rm(bkp_name, ignore_errors=True)
+            shutil.copytree(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True)
+            try:
+                yield
+            finally:
+                _rm(dir_path, ignore_errors=True)
+                shutil.move(bkp_name, dir_path)
+        else:
+            yield
+
+    def run(self) -> None:
+        self.output_dir.mkdir(parents=True, exist_ok=True)
+        self.egg_info.run()
+        egg_info_dir = self.egg_info.egg_info
+        assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created"
+
+        log.info(f"creating '{os.path.abspath(self.dist_info_dir)}'")
+        bdist_wheel = self.get_finalized_command('bdist_wheel')
+
+    # TODO: if bdist_wheel is merged into setuptools, just add "keep_egg_info" there
+        with self._maybe_bkp_dir(egg_info_dir, self.keep_egg_info):
+            bdist_wheel.egg2dist(egg_info_dir, self.dist_info_dir)
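Because the command above is marked internal, a minimal sketch of the supported route is to go through the PEP 517 metadata hook in ``setuptools.build_meta`` instead; the output directory name is arbitrary and the snippet assumes it is run from a project root::

    import os

    from setuptools import build_meta

    out_dir = os.path.abspath("metadata-out")      # arbitrary output directory
    os.makedirs(out_dir, exist_ok=True)

    # Returns the basename of the freshly created .dist-info directory.
    dist_info_name = build_meta.prepare_metadata_for_build_wheel(out_dir)
    print(os.path.join(out_dir, dist_info_name))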
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/easy_install.py b/.venv/lib/python3.12/site-packages/setuptools/command/easy_install.py
new file mode 100644
index 00000000..eb1b4c1f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/easy_install.py
@@ -0,0 +1,2365 @@
+"""
+Easy Install
+------------
+
+A tool for doing automatic download/extract/build of distutils-based Python
+packages.  For detailed documentation, see the accompanying EasyInstall.txt
+file, or visit the `EasyInstall home page`__.
+
+__ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
+
+"""
+
+from __future__ import annotations
+
+import configparser
+import contextlib
+import io
+import os
+import random
+import re
+import shlex
+import shutil
+import site
+import stat
+import struct
+import subprocess
+import sys
+import sysconfig
+import tempfile
+import textwrap
+import warnings
+import zipfile
+import zipimport
+from collections.abc import Iterable
+from glob import glob
+from sysconfig import get_path
+from typing import TYPE_CHECKING, NoReturn, TypedDict
+
+from jaraco.text import yield_lines
+
+import pkg_resources
+from pkg_resources import (
+    DEVELOP_DIST,
+    Distribution,
+    DistributionNotFound,
+    EggMetadata,
+    Environment,
+    PathMetadata,
+    Requirement,
+    VersionConflict,
+    WorkingSet,
+    find_distributions,
+    get_distribution,
+    normalize_path,
+    resource_string,
+)
+from setuptools import Command
+from setuptools.archive_util import unpack_archive
+from setuptools.command import bdist_egg, egg_info, setopt
+from setuptools.package_index import URL_SCHEME, PackageIndex, parse_requirement_arg
+from setuptools.sandbox import run_setup
+from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
+from setuptools.wheel import Wheel
+
+from .._path import ensure_directory
+from .._shutil import attempt_chmod_verbose as chmod, rmtree as _rmtree
+from ..compat import py39, py312
+
+from distutils import dir_util, log
+from distutils.command import install
+from distutils.command.build_scripts import first_line_re
+from distutils.errors import (
+    DistutilsArgError,
+    DistutilsError,
+    DistutilsOptionError,
+    DistutilsPlatformError,
+)
+from distutils.util import convert_path, get_platform, subst_vars
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+# Turn on PEP440Warnings
+warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
+
+__all__ = [
+    'easy_install',
+    'PthDistributions',
+    'extract_wininst_cfg',
+    'get_exe_prefixes',
+]
+
+
+def is_64bit():
+    return struct.calcsize("P") == 8
+
+
+def _to_bytes(s):
+    return s.encode('utf8')
+
+
+def isascii(s):
+    try:
+        s.encode('ascii')
+    except UnicodeError:
+        return False
+    return True
+
+
+def _one_liner(text):
+    return textwrap.dedent(text).strip().replace('\n', '; ')
+
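+# Editor's note (illustrative example, hypothetical input, not upstream code):
+# _one_liner() dedents, strips, and joins the lines of a triple-quoted snippet
+# with "; ", so
+#
+#     _one_liner("""
+#         import os
+#         print(os.getcwd())
+#         """)
+#
+# evaluates to "import os; print(os.getcwd())".  check_pth_processing() below
+# relies on this to squeeze its .pth test payload onto a single line.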
+
+class easy_install(Command):
+    """Manage a download/build/install process"""
+
+    description = "Find/get/install Python packages"
+    command_consumes_arguments = True
+
+    user_options = [
+        ('prefix=', None, "installation prefix"),
+        ("zip-ok", "z", "install package as a zipfile"),
+        ("multi-version", "m", "make apps have to require() a version"),
+        ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
+        ("install-dir=", "d", "install package to DIR"),
+        ("script-dir=", "s", "install scripts to DIR"),
+        ("exclude-scripts", "x", "Don't install scripts"),
+        ("always-copy", "a", "Copy all needed packages to install dir"),
+        ("index-url=", "i", "base URL of Python Package Index"),
+        ("find-links=", "f", "additional URL(s) to search for packages"),
+        ("build-directory=", "b", "download/extract/build in DIR; keep the results"),
+        (
+            'optimize=',
+            'O',
+            'also compile with optimization: -O1 for "python -O", '
+            '-O2 for "python -OO", and -O0 to disable [default: -O0]',
+        ),
+        ('record=', None, "filename in which to record list of installed files"),
+        ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
+        ('site-dirs=', 'S', "list of directories where .pth files work"),
+        ('editable', 'e', "Install specified packages in editable form"),
+        ('no-deps', 'N', "don't install dependencies"),
+        ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
+        ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"),
+        ('version', None, "print version information and exit"),
+        (
+            'no-find-links',
+            None,
+            "Don't load find-links defined in packages being installed",
+        ),
+        ('user', None, f"install in user site-package '{site.USER_SITE}'"),
+    ]
+    boolean_options = [
+        'zip-ok',
+        'multi-version',
+        'exclude-scripts',
+        'upgrade',
+        'always-copy',
+        'editable',
+        'no-deps',
+        'local-snapshots-ok',
+        'version',
+        'user',
+    ]
+
+    negative_opt = {'always-unzip': 'zip-ok'}
+    create_index = PackageIndex
+
+    def initialize_options(self):
+        EasyInstallDeprecationWarning.emit()
+
+        # the --user option seems to be an opt-in one,
+        # so the default should be False.
+        self.user = False
+        self.zip_ok = self.local_snapshots_ok = None
+        self.install_dir = self.script_dir = self.exclude_scripts = None
+        self.index_url = None
+        self.find_links = None
+        self.build_directory = None
+        self.args = None
+        self.optimize = self.record = None
+        self.upgrade = self.always_copy = self.multi_version = None
+        self.editable = self.no_deps = self.allow_hosts = None
+        self.root = self.prefix = self.no_report = None
+        self.version = None
+        self.install_purelib = None  # for pure module distributions
+        self.install_platlib = None  # non-pure (dists w/ extensions)
+        self.install_headers = None  # for C/C++ headers
+        self.install_lib = None  # set to either purelib or platlib
+        self.install_scripts = None
+        self.install_data = None
+        self.install_base = None
+        self.install_platbase = None
+        self.install_userbase = site.USER_BASE
+        self.install_usersite = site.USER_SITE
+        self.no_find_links = None
+
+        # Options not specifiable via command line
+        self.package_index = None
+        self.pth_file = self.always_copy_from = None
+        self.site_dirs = None
+        self.installed_projects = {}
+        # Always read easy_install options, even if we are subclassed, or have
+        # an independent instance created.  This ensures that defaults will
+        # always come from the standard configuration file(s)' "easy_install"
+        # section, even if this is a "develop" or "install" command, or some
+        # other embedding.
+        self._dry_run = None
+        self.verbose = self.distribution.verbose
+        self.distribution._set_command_options(
+            self, self.distribution.get_option_dict('easy_install')
+        )
+
+    def delete_blockers(self, blockers) -> None:
+        extant_blockers = (
+            filename
+            for filename in blockers
+            if os.path.exists(filename) or os.path.islink(filename)
+        )
+        list(map(self._delete_path, extant_blockers))
+
+    def _delete_path(self, path):
+        log.info("Deleting %s", path)
+        if self.dry_run:
+            return
+
+        is_tree = os.path.isdir(path) and not os.path.islink(path)
+        remover = _rmtree if is_tree else os.unlink
+        remover(path)
+
+    @staticmethod
+    def _render_version():
+        """
+        Render the Setuptools version and installation details, then exit.
+        """
+        ver = f'{sys.version_info.major}.{sys.version_info.minor}'
+        dist = get_distribution('setuptools')
+        print(f'setuptools {dist.version} from {dist.location} (Python {ver})')
+        raise SystemExit
+
+    def finalize_options(self) -> None:  # noqa: C901  # is too complex (25)  # FIXME
+        self.version and self._render_version()
+
+        py_version = sys.version.split()[0]
+
+        self.config_vars = dict(sysconfig.get_config_vars())
+
+        self.config_vars.update({
+            'dist_name': self.distribution.get_name(),
+            'dist_version': self.distribution.get_version(),
+            'dist_fullname': self.distribution.get_fullname(),
+            'py_version': py_version,
+            'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}',
+            'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}',
+            'sys_prefix': self.config_vars['prefix'],
+            'sys_exec_prefix': self.config_vars['exec_prefix'],
+            # Only POSIX systems have abiflags
+            'abiflags': getattr(sys, 'abiflags', ''),
+            # Only python 3.9+ has platlibdir
+            'platlibdir': getattr(sys, 'platlibdir', 'lib'),
+        })
+        with contextlib.suppress(AttributeError):
+            # only for distutils outside stdlib
+            self.config_vars.update({
+                'implementation_lower': install._get_implementation().lower(),
+                'implementation': install._get_implementation(),
+            })
+
+        # pypa/distutils#113 Python 3.9 compat
+        self.config_vars.setdefault(
+            'py_version_nodot_plat',
+            getattr(sys, 'windir', '').replace('.', ''),
+        )
+
+        self.config_vars['userbase'] = self.install_userbase
+        self.config_vars['usersite'] = self.install_usersite
+        if self.user and not site.ENABLE_USER_SITE:
+            log.warn("WARNING: The user site-packages directory is disabled.")
+
+        self._fix_install_dir_for_user_site()
+
+        self.expand_basedirs()
+        self.expand_dirs()
+
+        self._expand(
+            'install_dir',
+            'script_dir',
+            'build_directory',
+            'site_dirs',
+        )
+        # If a non-default installation directory was specified, default the
+        # script directory to match it.
+        if self.script_dir is None:
+            self.script_dir = self.install_dir
+
+        if self.no_find_links is None:
+            self.no_find_links = False
+
+        # Let install_dir get set by install_lib command, which in turn
+        # gets its info from the install command, and takes into account
+        # --prefix and --home and all that other crud.
+        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
+        # Likewise, set default script_dir from 'install_scripts.install_dir'
+        self.set_undefined_options('install_scripts', ('install_dir', 'script_dir'))
+
+        if self.user and self.install_purelib:
+            self.install_dir = self.install_purelib
+            self.script_dir = self.install_scripts
+        # default --record from the install command
+        self.set_undefined_options('install', ('record', 'record'))
+        self.all_site_dirs = get_site_dirs()
+        self.all_site_dirs.extend(self._process_site_dirs(self.site_dirs))
+
+        if not self.editable:
+            self.check_site_dir()
+        default_index = os.getenv("__EASYINSTALL_INDEX", "https://pypi.org/simple/")
+        # ^ Private API for testing purposes only
+        self.index_url = self.index_url or default_index
+        self.shadow_path = self.all_site_dirs[:]
+        for path_item in self.install_dir, normalize_path(self.script_dir):
+            if path_item not in self.shadow_path:
+                self.shadow_path.insert(0, path_item)
+
+        if self.allow_hosts is not None:
+            hosts = [s.strip() for s in self.allow_hosts.split(',')]
+        else:
+            hosts = ['*']
+        if self.package_index is None:
+            self.package_index = self.create_index(
+                self.index_url,
+                search_path=self.shadow_path,
+                hosts=hosts,
+            )
+        self.local_index = Environment(self.shadow_path + sys.path)
+
+        if self.find_links is not None:
+            if isinstance(self.find_links, str):
+                self.find_links = self.find_links.split()
+        else:
+            self.find_links = []
+        if self.local_snapshots_ok:
+            self.package_index.scan_egg_links(self.shadow_path + sys.path)
+        if not self.no_find_links:
+            self.package_index.add_find_links(self.find_links)
+        self.set_undefined_options('install_lib', ('optimize', 'optimize'))
+        self.optimize = self._validate_optimize(self.optimize)
+
+        if self.editable and not self.build_directory:
+            raise DistutilsArgError(
+                "Must specify a build directory (-b) when using --editable"
+            )
+        if not self.args:
+            raise DistutilsArgError(
+                "No urls, filenames, or requirements specified (see --help)"
+            )
+
+        self.outputs: list[str] = []
+
+    @staticmethod
+    def _process_site_dirs(site_dirs):
+        if site_dirs is None:
+            return
+
+        normpath = map(normalize_path, sys.path)
+        site_dirs = [os.path.expanduser(s.strip()) for s in site_dirs.split(',')]
+        for d in site_dirs:
+            if not os.path.isdir(d):
+                log.warn("%s (in --site-dirs) does not exist", d)
+            elif normalize_path(d) not in normpath:
+                raise DistutilsOptionError(d + " (in --site-dirs) is not on sys.path")
+            else:
+                yield normalize_path(d)
+
+    @staticmethod
+    def _validate_optimize(value):
+        try:
+            value = int(value)
+            if value not in range(3):
+                raise ValueError
+        except ValueError as e:
+            raise DistutilsOptionError("--optimize must be 0, 1, or 2") from e
+
+        return value
+
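+    # Editor's note (illustrative, hypothetical values): _validate_optimize
+    # coerces the string it receives from the command line to an int and only
+    # accepts 0, 1 or 2, so _validate_optimize("2") returns 2, while
+    # _validate_optimize("3") or _validate_optimize("fast") raise
+    # DistutilsOptionError.
+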
+    def _fix_install_dir_for_user_site(self):
+        """
+        Fix the install_dir if "--user" was used.
+        """
+        if not self.user:
+            return
+
+        self.create_home_path()
+        if self.install_userbase is None:
+            msg = "User base directory is not specified"
+            raise DistutilsPlatformError(msg)
+        self.install_base = self.install_platbase = self.install_userbase
+        scheme_name = f'{os.name}_user'
+        self.select_scheme(scheme_name)
+
+    def _expand_attrs(self, attrs):
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name == 'posix' or os.name == 'nt':
+                    val = os.path.expanduser(val)
+                val = subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+    def expand_basedirs(self) -> None:
+        """Calls `os.path.expanduser` on install_base, install_platbase and
+        root."""
+        self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+    def expand_dirs(self) -> None:
+        """Calls `os.path.expanduser` on install dirs."""
+        dirs = [
+            'install_purelib',
+            'install_platlib',
+            'install_lib',
+            'install_headers',
+            'install_scripts',
+            'install_data',
+        ]
+        self._expand_attrs(dirs)
+
+    def run(self, show_deprecation: bool = True) -> None:
+        if show_deprecation:
+            self.announce(
+                "WARNING: The easy_install command is deprecated "
+                "and will be removed in a future version.",
+                log.WARN,
+            )
+        if self.verbose != self.distribution.verbose:
+            log.set_verbosity(self.verbose)
+        try:
+            for spec in self.args:
+                self.easy_install(spec, not self.no_deps)
+            if self.record:
+                outputs = self.outputs
+                if self.root:  # strip any package prefix
+                    root_len = len(self.root)
+                    for counter in range(len(outputs)):
+                        outputs[counter] = outputs[counter][root_len:]
+                from distutils import file_util
+
+                self.execute(
+                    file_util.write_file,
+                    (self.record, outputs),
+                    f"writing list of installed files to '{self.record}'",
+                )
+            self.warn_deprecated_options()
+        finally:
+            log.set_verbosity(self.distribution.verbose)
+
+    def pseudo_tempname(self):
+        """Return a pseudo-tempname base in the install directory.
+        This code is intentionally naive; if a malicious party can write to
+        the target directory you're already in deep doodoo.
+        """
+        try:
+            pid = os.getpid()
+        except Exception:
+            pid = random.randint(0, sys.maxsize)
+        return os.path.join(self.install_dir, f"test-easy-install-{pid}")
+
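+    # Editor's note (illustrative, hypothetical paths): with
+    # install_dir="/opt/site-packages" and a current pid of 4242,
+    # pseudo_tempname() returns "/opt/site-packages/test-easy-install-4242".
+    # check_site_dir() and check_pth_processing() append ".write-test",
+    # ".pth" and ".pth.ok" to that base when probing the install directory.
+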
+    def warn_deprecated_options(self) -> None:
+        pass
+
+    def check_site_dir(self) -> None:  # is too complex (12)  # FIXME
+        """Verify that self.install_dir is .pth-capable dir, if needed"""
+
+        instdir = normalize_path(self.install_dir)
+        pth_file = os.path.join(instdir, 'easy-install.pth')
+
+        if not os.path.exists(instdir):
+            try:
+                os.makedirs(instdir)
+            except OSError:
+                self.cant_write_to_target()
+
+        # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
+        is_site_dir = instdir in self.all_site_dirs
+
+        if not is_site_dir and not self.multi_version:
+            # No?  Then directly test whether it does .pth file processing
+            is_site_dir = self.check_pth_processing()
+        else:
+            # make sure we can write to target dir
+            testfile = self.pseudo_tempname() + '.write-test'
+            test_exists = os.path.exists(testfile)
+            try:
+                if test_exists:
+                    os.unlink(testfile)
+                open(testfile, 'wb').close()
+                os.unlink(testfile)
+            except OSError:
+                self.cant_write_to_target()
+
+        if not is_site_dir and not self.multi_version:
+            # Can't install non-multi to non-site dir with easy_install
+            pythonpath = os.environ.get('PYTHONPATH', '')
+            log.warn(self.__no_default_msg, self.install_dir, pythonpath)
+
+        if is_site_dir:
+            if self.pth_file is None:
+                self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
+        else:
+            self.pth_file = None
+
+        if self.multi_version and not os.path.exists(pth_file):
+            self.pth_file = None  # don't create a .pth file
+        self.install_dir = instdir
+
+    __cant_write_msg = textwrap.dedent(
+        """
+        can't create or remove files in install directory
+
+        The following error occurred while trying to add or remove files in the
+        installation directory:
+
+            %s
+
+        The installation directory you specified (via --install-dir, --prefix, or
+        the distutils default setting) was:
+
+            %s
+        """
+    ).lstrip()
+
+    __not_exists_id = textwrap.dedent(
+        """
+        This directory does not currently exist.  Please create it and try again, or
+        choose a different installation directory (using the -d or --install-dir
+        option).
+        """
+    ).lstrip()
+
+    __access_msg = textwrap.dedent(
+        """
+        Perhaps your account does not have write access to this directory?  If the
+        installation directory is a system-owned directory, you may need to sign in
+        as the administrator or "root" account.  If you do not have administrative
+        access to this machine, you may wish to choose a different installation
+        directory, preferably one that is listed in your PYTHONPATH environment
+        variable.
+
+        For information on other options, you may wish to consult the
+        documentation at:
+
+          https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
+
+        Please make the appropriate changes for your system and try again.
+        """
+    ).lstrip()
+
+    def cant_write_to_target(self) -> NoReturn:
+        msg = self.__cant_write_msg % (
+            sys.exc_info()[1],
+            self.install_dir,
+        )
+
+        if not os.path.exists(self.install_dir):
+            msg += '\n' + self.__not_exists_id
+        else:
+            msg += '\n' + self.__access_msg
+        raise DistutilsError(msg)
+
+    def check_pth_processing(self):  # noqa: C901
+        """Empirically verify whether .pth files are supported in inst. dir"""
+        instdir = self.install_dir
+        log.info("Checking .pth file support in %s", instdir)
+        pth_file = self.pseudo_tempname() + ".pth"
+        ok_file = pth_file + '.ok'
+        ok_exists = os.path.exists(ok_file)
+        tmpl = (
+            _one_liner(
+                """
+            import os
+            f = open({ok_file!r}, 'w', encoding="utf-8")
+            f.write('OK')
+            f.close()
+            """
+            )
+            + '\n'
+        )
+        try:
+            if ok_exists:
+                os.unlink(ok_file)
+            dirname = os.path.dirname(ok_file)
+            os.makedirs(dirname, exist_ok=True)
+            f = open(pth_file, 'w', encoding=py312.PTH_ENCODING)
+            # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
+            #     see python/cpython#77102.
+        except OSError:
+            self.cant_write_to_target()
+        else:
+            try:
+                f.write(tmpl.format(**locals()))
+                f.close()
+                f = None
+                executable = sys.executable
+                if os.name == 'nt':
+                    dirname, basename = os.path.split(executable)
+                    alt = os.path.join(dirname, 'pythonw.exe')
+                    use_alt = basename.lower() == 'python.exe' and os.path.exists(alt)
+                    if use_alt:
+                        # use pythonw.exe to avoid opening a console window
+                        executable = alt
+
+                from distutils.spawn import spawn
+
+                spawn([executable, '-E', '-c', 'pass'], 0)
+
+                if os.path.exists(ok_file):
+                    log.info("TEST PASSED: %s appears to support .pth files", instdir)
+                    return True
+            finally:
+                if f:
+                    f.close()
+                if os.path.exists(ok_file):
+                    os.unlink(ok_file)
+                if os.path.exists(pth_file):
+                    os.unlink(pth_file)
+        if not self.multi_version:
+            log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
+        return False
+
+    def install_egg_scripts(self, dist) -> None:
+        """Write all the scripts for `dist`, unless scripts are excluded"""
+        if not self.exclude_scripts and dist.metadata_isdir('scripts'):
+            for script_name in dist.metadata_listdir('scripts'):
+                if dist.metadata_isdir('scripts/' + script_name):
+                    # The "script" is a directory, likely a Python 3
+                    # __pycache__ directory, so skip it.
+                    continue
+                self.install_script(
+                    dist, script_name, dist.get_metadata('scripts/' + script_name)
+                )
+        self.install_wrapper_scripts(dist)
+
+    def add_output(self, path) -> None:
+        if os.path.isdir(path):
+            for base, dirs, files in os.walk(path):
+                for filename in files:
+                    self.outputs.append(os.path.join(base, filename))
+        else:
+            self.outputs.append(path)
+
+    def not_editable(self, spec) -> None:
+        if self.editable:
+            raise DistutilsArgError(
+                f"Invalid argument {spec!r}: you can't use filenames or URLs "
+                "with --editable (except via the --find-links option)."
+            )
+
+    def check_editable(self, spec) -> None:
+        if not self.editable:
+            return
+
+        if os.path.exists(os.path.join(self.build_directory, spec.key)):
+            raise DistutilsArgError(
+                f"{spec.key!r} already exists in {self.build_directory}; can't do a checkout there"
+            )
+
+    @contextlib.contextmanager
+    def _tmpdir(self):
+        tmpdir = tempfile.mkdtemp(prefix="easy_install-")
+        try:
+            # cast to str as workaround for #709 and #710 and #712
+            yield str(tmpdir)
+        finally:
+            os.path.exists(tmpdir) and _rmtree(tmpdir)
+
+    def easy_install(self, spec, deps: bool = False) -> Distribution | None:
+        with self._tmpdir() as tmpdir:
+            if not isinstance(spec, Requirement):
+                if URL_SCHEME(spec):
+                    # It's a url, download it to tmpdir and process
+                    self.not_editable(spec)
+                    dl = self.package_index.download(spec, tmpdir)
+                    return self.install_item(None, dl, tmpdir, deps, True)
+
+                elif os.path.exists(spec):
+                    # Existing file or directory, just process it directly
+                    self.not_editable(spec)
+                    return self.install_item(None, spec, tmpdir, deps, True)
+                else:
+                    spec = parse_requirement_arg(spec)
+
+            self.check_editable(spec)
+            dist = self.package_index.fetch_distribution(
+                spec,
+                tmpdir,
+                self.upgrade,
+                self.editable,
+                not self.always_copy,
+                self.local_index,
+            )
+            if dist is None:
+                msg = f"Could not find suitable distribution for {spec!r}"
+                if self.always_copy:
+                    msg += " (--always-copy skips system and development eggs)"
+                raise DistutilsError(msg)
+            elif dist.precedence == DEVELOP_DIST:
+                # .egg-info dists don't need installing, just process deps
+                self.process_distribution(spec, dist, deps, "Using")
+                return dist
+            else:
+                return self.install_item(spec, dist.location, tmpdir, deps)
+
+    def install_item(
+        self, spec, download, tmpdir, deps, install_needed: bool = False
+    ) -> Distribution | None:
+        # Installation is also needed if file in tmpdir or is not an egg
+        install_needed = install_needed or bool(self.always_copy)
+        install_needed = install_needed or os.path.dirname(download) == tmpdir
+        install_needed = install_needed or not download.endswith('.egg')
+        install_needed = install_needed or (
+            self.always_copy_from is not None
+            and os.path.dirname(normalize_path(download))
+            == normalize_path(self.always_copy_from)
+        )
+
+        if spec and not install_needed:
+            # at this point, we know it's a local .egg, we just don't know if
+            # it's already installed.
+            for dist in self.local_index[spec.project_name]:
+                if dist.location == download:
+                    break
+            else:
+                install_needed = True  # it's not in the local index
+
+        log.info("Processing %s", os.path.basename(download))
+
+        if install_needed:
+            dists = self.install_eggs(spec, download, tmpdir)
+            for dist in dists:
+                self.process_distribution(spec, dist, deps)
+        else:
+            dists = [self.egg_distribution(download)]
+            self.process_distribution(spec, dists[0], deps, "Using")
+
+        if spec is not None:
+            for dist in dists:
+                if dist in spec:
+                    return dist
+        return None
+
+    def select_scheme(self, name):
+        try:
+            install._select_scheme(self, name)
+        except AttributeError:
+            # stdlib distutils
+            install.install.select_scheme(self, name.replace('posix', 'unix'))
+
+    # FIXME: 'easy_install.process_distribution' is too complex (12)
+    def process_distribution(  # noqa: C901
+        self,
+        requirement,
+        dist,
+        deps: bool = True,
+        *info,
+    ) -> None:
+        self.update_pth(dist)
+        self.package_index.add(dist)
+        if dist in self.local_index[dist.key]:
+            self.local_index.remove(dist)
+        self.local_index.add(dist)
+        self.install_egg_scripts(dist)
+        self.installed_projects[dist.key] = dist
+        log.info(self.installation_report(requirement, dist, *info))
+        if dist.has_metadata('dependency_links.txt') and not self.no_find_links:
+            self.package_index.add_find_links(
+                dist.get_metadata_lines('dependency_links.txt')
+            )
+        if not deps and not self.always_copy:
+            return
+        elif requirement is not None and dist.key != requirement.key:
+            log.warn("Skipping dependencies for %s", dist)
+            return  # XXX this is not the distribution we were looking for
+        elif requirement is None or dist not in requirement:
+            # if we wound up with a different version, resolve what we've got
+            distreq = dist.as_requirement()
+            requirement = Requirement(str(distreq))
+        log.info("Processing dependencies for %s", requirement)
+        try:
+            distros = WorkingSet([]).resolve(
+                [requirement], self.local_index, self.easy_install
+            )
+        except DistributionNotFound as e:
+            raise DistutilsError(str(e)) from e
+        except VersionConflict as e:
+            raise DistutilsError(e.report()) from e
+        if self.always_copy or self.always_copy_from:
+            # Force all the relevant distros to be copied or activated
+            for dist in distros:
+                if dist.key not in self.installed_projects:
+                    self.easy_install(dist.as_requirement())
+        log.info("Finished processing dependencies for %s", requirement)
+
+    def should_unzip(self, dist) -> bool:
+        if self.zip_ok is not None:
+            return not self.zip_ok
+        if dist.has_metadata('not-zip-safe'):
+            return True
+        if not dist.has_metadata('zip-safe'):
+            return True
+        return False
+
+    def maybe_move(self, spec, dist_filename, setup_base):
+        dst = os.path.join(self.build_directory, spec.key)
+        if os.path.exists(dst):
+            msg = "%r already exists in %s; build directory %s will not be kept"
+            log.warn(msg, spec.key, self.build_directory, setup_base)
+            return setup_base
+        if os.path.isdir(dist_filename):
+            setup_base = dist_filename
+        else:
+            if os.path.dirname(dist_filename) == setup_base:
+                os.unlink(dist_filename)  # get it out of the tmp dir
+            contents = os.listdir(setup_base)
+            if len(contents) == 1:
+                dist_filename = os.path.join(setup_base, contents[0])
+                if os.path.isdir(dist_filename):
+                    # if the only thing there is a directory, move it instead
+                    setup_base = dist_filename
+        ensure_directory(dst)
+        shutil.move(setup_base, dst)
+        return dst
+
+    def install_wrapper_scripts(self, dist) -> None:
+        if self.exclude_scripts:
+            return
+        for args in ScriptWriter.best().get_args(dist):
+            self.write_script(*args)
+
+    def install_script(self, dist, script_name, script_text, dev_path=None) -> None:
+        """Generate a legacy script wrapper and install it"""
+        spec = str(dist.as_requirement())
+        is_script = is_python_script(script_text, script_name)
+
+        if is_script:
+            body = self._load_template(dev_path) % locals()
+            script_text = ScriptWriter.get_header(script_text) + body
+        self.write_script(script_name, _to_bytes(script_text), 'b')
+
+    @staticmethod
+    def _load_template(dev_path):
+        """
+        There are a couple of template scripts in the package. This
+        function loads one of them and prepares it for use.
+        """
+        # See https://github.com/pypa/setuptools/issues/134 for info
+        # on script file naming and downstream issues with SVR4
+        name = 'script.tmpl'
+        if dev_path:
+            name = name.replace('.tmpl', ' (dev).tmpl')
+
+        raw_bytes = resource_string('setuptools', name)
+        return raw_bytes.decode('utf-8')
+
+    def write_script(self, script_name, contents, mode: str = "t", blockers=()) -> None:
+        """Write an executable file to the scripts directory"""
+        self.delete_blockers(  # clean up old .py/.pyw w/o a script
+            [os.path.join(self.script_dir, x) for x in blockers]
+        )
+        log.info("Installing %s script to %s", script_name, self.script_dir)
+        target = os.path.join(self.script_dir, script_name)
+        self.add_output(target)
+
+        if self.dry_run:
+            return
+
+        mask = current_umask()
+        ensure_directory(target)
+        if os.path.exists(target):
+            os.unlink(target)
+
+        encoding = None if "b" in mode else "utf-8"
+        with open(target, "w" + mode, encoding=encoding) as f:
+            f.write(contents)
+        chmod(target, 0o777 - mask)
+
+    def install_eggs(self, spec, dist_filename, tmpdir) -> list[Distribution]:
+        # .egg dirs or files are already built, so just return them
+        installer_map = {
+            '.egg': self.install_egg,
+            '.exe': self.install_exe,
+            '.whl': self.install_wheel,
+        }
+        try:
+            install_dist = installer_map[dist_filename.lower()[-4:]]
+        except KeyError:
+            pass
+        else:
+            return [install_dist(dist_filename, tmpdir)]
+
+        # Anything else, try to extract and build
+        setup_base = tmpdir
+        if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
+            unpack_archive(dist_filename, tmpdir, self.unpack_progress)
+        elif os.path.isdir(dist_filename):
+            setup_base = os.path.abspath(dist_filename)
+
+        if (
+            setup_base.startswith(tmpdir)  # something we downloaded
+            and self.build_directory
+            and spec is not None
+        ):
+            setup_base = self.maybe_move(spec, dist_filename, setup_base)
+
+        # Find the setup.py file
+        setup_script = os.path.join(setup_base, 'setup.py')
+
+        if not os.path.exists(setup_script):
+            setups = glob(os.path.join(setup_base, '*', 'setup.py'))
+            if not setups:
+                raise DistutilsError(
+                    f"Couldn't find a setup script in {os.path.abspath(dist_filename)}"
+                )
+            if len(setups) > 1:
+                raise DistutilsError(
+                    f"Multiple setup scripts in {os.path.abspath(dist_filename)}"
+                )
+            setup_script = setups[0]
+
+        # Now run it, and return the result
+        if self.editable:
+            log.info(self.report_editable(spec, setup_script))
+            return []
+        else:
+            return self.build_and_install(setup_script, setup_base)
+
+    def egg_distribution(self, egg_path):
+        if os.path.isdir(egg_path):
+            metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
+        else:
+            metadata = EggMetadata(zipimport.zipimporter(egg_path))
+        return Distribution.from_filename(egg_path, metadata=metadata)
+
+    # FIXME: 'easy_install.install_egg' is too complex (11)
+    def install_egg(self, egg_path, tmpdir):
+        destination = os.path.join(
+            self.install_dir,
+            os.path.basename(egg_path),
+        )
+        destination = os.path.abspath(destination)
+        if not self.dry_run:
+            ensure_directory(destination)
+
+        dist = self.egg_distribution(egg_path)
+        if not (
+            os.path.exists(destination) and os.path.samefile(egg_path, destination)
+        ):
+            if os.path.isdir(destination) and not os.path.islink(destination):
+                dir_util.remove_tree(destination, dry_run=self.dry_run)
+            elif os.path.exists(destination):
+                self.execute(
+                    os.unlink,
+                    (destination,),
+                    "Removing " + destination,
+                )
+            try:
+                new_dist_is_zipped = False
+                if os.path.isdir(egg_path):
+                    if egg_path.startswith(tmpdir):
+                        f, m = shutil.move, "Moving"
+                    else:
+                        f, m = shutil.copytree, "Copying"
+                elif self.should_unzip(dist):
+                    self.mkpath(destination)
+                    f, m = self.unpack_and_compile, "Extracting"
+                else:
+                    new_dist_is_zipped = True
+                    if egg_path.startswith(tmpdir):
+                        f, m = shutil.move, "Moving"
+                    else:
+                        f, m = shutil.copy2, "Copying"
+                self.execute(
+                    f,
+                    (egg_path, destination),
+                    (m + " %s to %s")
+                    % (os.path.basename(egg_path), os.path.dirname(destination)),
+                )
+                update_dist_caches(
+                    destination,
+                    fix_zipimporter_caches=new_dist_is_zipped,
+                )
+            except Exception:
+                update_dist_caches(destination, fix_zipimporter_caches=False)
+                raise
+
+        self.add_output(destination)
+        return self.egg_distribution(destination)
+
+    def install_exe(self, dist_filename, tmpdir):
+        # See if it's valid, get data
+        cfg = extract_wininst_cfg(dist_filename)
+        if cfg is None:
+            raise DistutilsError(
+                f"{dist_filename} is not a valid distutils Windows .exe"
+            )
+        # Create a dummy distribution object until we build the real distro
+        dist = Distribution(
+            None,
+            project_name=cfg.get('metadata', 'name'),
+            version=cfg.get('metadata', 'version'),
+            platform=get_platform(),
+        )
+
+        # Convert the .exe to an unpacked egg
+        egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg')
+        dist.location = egg_path
+        egg_tmp = egg_path + '.tmp'
+        _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
+        pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
+        ensure_directory(pkg_inf)  # make sure EGG-INFO dir exists
+        dist._provider = PathMetadata(egg_tmp, _egg_info)  # XXX
+        self.exe_to_egg(dist_filename, egg_tmp)
+
+        # Write EGG-INFO/PKG-INFO
+        if not os.path.exists(pkg_inf):
+            with open(pkg_inf, 'w', encoding="utf-8") as f:
+                f.write('Metadata-Version: 1.0\n')
+                for k, v in cfg.items('metadata'):
+                    if k != 'target_version':
+                        k = k.replace('_', '-').title()
+                        f.write(f'{k}: {v}\n')
+        script_dir = os.path.join(_egg_info, 'scripts')
+        # delete entry-point scripts to avoid duping
+        self.delete_blockers([
+            os.path.join(script_dir, args[0]) for args in ScriptWriter.get_args(dist)
+        ])
+        # Build .egg file from tmpdir
+        bdist_egg.make_zipfile(
+            egg_path,
+            egg_tmp,
+            verbose=self.verbose,
+            dry_run=self.dry_run,
+        )
+        # install the .egg
+        return self.install_egg(egg_path, tmpdir)
+
+    # FIXME: 'easy_install.exe_to_egg' is too complex (12)
+    def exe_to_egg(self, dist_filename, egg_tmp) -> None:  # noqa: C901
+        """Extract a bdist_wininst to the directories an egg would use"""
+        # Check for .pth file and set up prefix translations
+        prefixes = get_exe_prefixes(dist_filename)
+        to_compile = []
+        native_libs = []
+        top_level = set()
+
+        def process(src, dst):
+            s = src.lower()
+            for old, new in prefixes:
+                if s.startswith(old):
+                    src = new + src[len(old) :]
+                    parts = src.split('/')
+                    dst = os.path.join(egg_tmp, *parts)
+                    dl = dst.lower()
+                    if dl.endswith('.pyd') or dl.endswith('.dll'):
+                        parts[-1] = bdist_egg.strip_module(parts[-1])
+                        top_level.add(os.path.splitext(parts[0])[0])
+                        native_libs.append(src)
+                    elif dl.endswith('.py') and old != 'SCRIPTS/':
+                        top_level.add(os.path.splitext(parts[0])[0])
+                        to_compile.append(dst)
+                    return dst
+            if not src.endswith('.pth'):
+                log.warn("WARNING: can't process %s", src)
+            return None
+
+        # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
+        unpack_archive(dist_filename, egg_tmp, process)
+        stubs = []
+        for res in native_libs:
+            if res.lower().endswith('.pyd'):  # create stubs for .pyd's
+                parts = res.split('/')
+                resource = parts[-1]
+                parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
+                pyfile = os.path.join(egg_tmp, *parts)
+                to_compile.append(pyfile)
+                stubs.append(pyfile)
+                bdist_egg.write_stub(resource, pyfile)
+        self.byte_compile(to_compile)  # compile .py's
+        bdist_egg.write_safety_flag(
+            os.path.join(egg_tmp, 'EGG-INFO'), bdist_egg.analyze_egg(egg_tmp, stubs)
+        )  # write zip-safety flag
+
+        for name in 'top_level', 'native_libs':
+            if locals()[name]:
+                txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
+                if not os.path.exists(txt):
+                    with open(txt, 'w', encoding="utf-8") as f:
+                        f.write('\n'.join(locals()[name]) + '\n')
+
+    def install_wheel(self, wheel_path, tmpdir):
+        wheel = Wheel(wheel_path)
+        assert wheel.is_compatible()
+        destination = os.path.join(self.install_dir, wheel.egg_name())
+        destination = os.path.abspath(destination)
+        if not self.dry_run:
+            ensure_directory(destination)
+        if os.path.isdir(destination) and not os.path.islink(destination):
+            dir_util.remove_tree(destination, dry_run=self.dry_run)
+        elif os.path.exists(destination):
+            self.execute(
+                os.unlink,
+                (destination,),
+                "Removing " + destination,
+            )
+        try:
+            self.execute(
+                wheel.install_as_egg,
+                (destination,),
+                (
+                    f"Installing {os.path.basename(wheel_path)} to {os.path.dirname(destination)}"
+                ),
+            )
+        finally:
+            update_dist_caches(destination, fix_zipimporter_caches=False)
+        self.add_output(destination)
+        return self.egg_distribution(destination)
+
+    __mv_warning = textwrap.dedent(
+        """
+        Because this distribution was installed --multi-version, before you can
+        import modules from this package in an application, you will need to
+        'import pkg_resources' and then use a 'require()' call similar to one of
+        these examples, in order to select the desired version:
+
+            pkg_resources.require("%(name)s")  # latest installed version
+            pkg_resources.require("%(name)s==%(version)s")  # this exact version
+            pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
+        """
+    ).lstrip()
+
+    __id_warning = textwrap.dedent(
+        """
+        Note also that the installation directory must be on sys.path at runtime for
+        this to work.  (e.g. by being the application's script directory, by being on
+        PYTHONPATH, or by being added to sys.path by your code.)
+        """
+    )
+
+    def installation_report(self, req, dist, what: str = "Installed") -> str:
+        """Helpful installation message for display to package users"""
+        msg = "\n%(what)s %(eggloc)s%(extras)s"
+        if self.multi_version and not self.no_report:
+            msg += '\n' + self.__mv_warning
+            if self.install_dir not in map(normalize_path, sys.path):
+                msg += '\n' + self.__id_warning
+
+        eggloc = dist.location
+        name = dist.project_name
+        version = dist.version
+        extras = ''  # TODO: self.report_extras(req, dist)
+        return msg % locals()
+
+    __editable_msg = textwrap.dedent(
+        """
+        Extracted editable version of %(spec)s to %(dirname)s
+
+        If it uses setuptools in its setup script, you can activate it in
+        "development" mode by going to that directory and running::
+
+            %(python)s setup.py develop
+
+        See the setuptools documentation for the "develop" command for more info.
+        """
+    ).lstrip()
+
+    def report_editable(self, spec, setup_script):
+        dirname = os.path.dirname(setup_script)
+        python = sys.executable
+        return '\n' + self.__editable_msg % locals()
+
+    def run_setup(self, setup_script, setup_base, args) -> None:
+        sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
+        sys.modules.setdefault('distutils.command.egg_info', egg_info)
+
+        args = list(args)
+        if self.verbose > 2:
+            v = 'v' * (self.verbose - 1)
+            args.insert(0, '-' + v)
+        elif self.verbose < 2:
+            args.insert(0, '-q')
+        if self.dry_run:
+            args.insert(0, '-n')
+        log.info("Running %s %s", setup_script[len(setup_base) + 1 :], ' '.join(args))
+        try:
+            run_setup(setup_script, args)
+        except SystemExit as v:
+            raise DistutilsError(f"Setup script exited with {v.args[0]}") from v
+
+    def build_and_install(self, setup_script, setup_base):
+        args = ['bdist_egg', '--dist-dir']
+
+        dist_dir = tempfile.mkdtemp(
+            prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
+        )
+        try:
+            self._set_fetcher_options(os.path.dirname(setup_script))
+            args.append(dist_dir)
+
+            self.run_setup(setup_script, setup_base, args)
+            all_eggs = Environment([dist_dir])
+            eggs = [
+                self.install_egg(dist.location, setup_base)
+                for key in all_eggs
+                for dist in all_eggs[key]
+            ]
+            if not eggs and not self.dry_run:
+                log.warn("No eggs found in %s (setup script problem?)", dist_dir)
+            return eggs
+        finally:
+            _rmtree(dist_dir)
+            log.set_verbosity(self.verbose)  # restore our log verbosity
+
+    def _set_fetcher_options(self, base):
+        """
+        When easy_install is about to run bdist_egg on a source dist, that
+        source dist might have 'setup_requires' directives, requiring
+        additional fetching. Ensure the fetcher options given to easy_install
+        are available to that command as well.
+        """
+        # find the fetch options from easy_install and write them out
+        # to the setup.cfg file.
+        ei_opts = self.distribution.get_option_dict('easy_install').copy()
+        fetch_directives = (
+            'find_links',
+            'site_dirs',
+            'index_url',
+            'optimize',
+            'allow_hosts',
+        )
+        fetch_options = {}
+        for key, val in ei_opts.items():
+            if key not in fetch_directives:
+                continue
+            fetch_options[key] = val[1]
+        # create a settings dictionary suitable for `edit_config`
+        settings = dict(easy_install=fetch_options)
+        cfg_filename = os.path.join(base, 'setup.cfg')
+        setopt.edit_config(cfg_filename, settings)
+
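+    # Editor's note, an illustrative sketch (hypothetical option values, not
+    # upstream code): if easy_install was invoked with --find-links and
+    # --index-url, _set_fetcher_options() leaves a fragment roughly like
+    #
+    #     [easy_install]
+    #     find_links = https://example.com/wheels/
+    #     index_url = https://pypi.org/simple/
+    #
+    # in the unpacked source's setup.cfg, so that a nested "setup.py
+    # bdist_egg" run triggered by setup_requires resolves packages against
+    # the same locations as the outer easy_install invocation.
+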
+    def update_pth(self, dist) -> None:  # noqa: C901  # is too complex (11)  # FIXME
+        if self.pth_file is None:
+            return
+
+        for d in self.pth_file[dist.key]:  # drop old entries
+            if not self.multi_version and d.location == dist.location:
+                continue
+
+            log.info("Removing %s from easy-install.pth file", d)
+            self.pth_file.remove(d)
+            if d.location in self.shadow_path:
+                self.shadow_path.remove(d.location)
+
+        if not self.multi_version:
+            if dist.location in self.pth_file.paths:
+                log.info(
+                    "%s is already the active version in easy-install.pth",
+                    dist,
+                )
+            else:
+                log.info("Adding %s to easy-install.pth file", dist)
+                self.pth_file.add(dist)  # add new entry
+                if dist.location not in self.shadow_path:
+                    self.shadow_path.append(dist.location)
+
+        if self.dry_run:
+            return
+
+        self.pth_file.save()
+
+        if dist.key != 'setuptools':
+            return
+
+        # Ensure that setuptools itself never becomes unavailable!
+        # XXX should this check for latest version?
+        filename = os.path.join(self.install_dir, 'setuptools.pth')
+        if os.path.islink(filename):
+            os.unlink(filename)
+
+        with open(filename, 'wt', encoding=py312.PTH_ENCODING) as f:
+            # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
+            #     see python/cpython#77102.
+            f.write(self.pth_file.make_relative(dist.location) + '\n')
+
+    def unpack_progress(self, src, dst):
+        # Progress filter for unpacking
+        log.debug("Unpacking %s to %s", src, dst)
+        return dst  # only unpack-and-compile skips files for dry run
+
+    def unpack_and_compile(self, egg_path, destination) -> None:
+        to_compile = []
+        to_chmod = []
+
+        def pf(src, dst):
+            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
+                to_compile.append(dst)
+            elif dst.endswith('.dll') or dst.endswith('.so'):
+                to_chmod.append(dst)
+            self.unpack_progress(src, dst)
+            return not self.dry_run and dst or None
+
+        unpack_archive(egg_path, destination, pf)
+        self.byte_compile(to_compile)
+        if not self.dry_run:
+            for f in to_chmod:
+                mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
+                chmod(f, mode)
+
+    def byte_compile(self, to_compile) -> None:
+        if sys.dont_write_bytecode:
+            return
+
+        from distutils.util import byte_compile
+
+        try:
+            # try to make the byte compile messages quieter
+            log.set_verbosity(self.verbose - 1)
+
+            byte_compile(to_compile, optimize=0, force=True, dry_run=self.dry_run)
+            if self.optimize:
+                byte_compile(
+                    to_compile,
+                    optimize=self.optimize,
+                    force=True,
+                    dry_run=self.dry_run,
+                )
+        finally:
+            log.set_verbosity(self.verbose)  # restore original verbosity
+
+    __no_default_msg = textwrap.dedent(
+        """
+        bad install directory or PYTHONPATH
+
+        You are attempting to install a package to a directory that is not
+        on PYTHONPATH and which Python does not read ".pth" files from.  The
+        installation directory you specified (via --install-dir, --prefix, or
+        the distutils default setting) was:
+
+            %s
+
+        and your PYTHONPATH environment variable currently contains:
+
+            %r
+
+        Here are some of your options for correcting the problem:
+
+        * You can choose a different installation directory, i.e., one that is
+          on PYTHONPATH or supports .pth files
+
+        * You can add the installation directory to the PYTHONPATH environment
+          variable.  (It must then also be on PYTHONPATH whenever you run
+          Python and want to use the package(s) you are installing.)
+
+        * You can set up the installation directory to support ".pth" files by
+          using one of the approaches described here:
+
+          https://setuptools.pypa.io/en/latest/deprecated/easy_install.html#custom-installation-locations
+
+
+        Please make the appropriate changes for your system and try again.
+        """
+    ).strip()
+
+    def create_home_path(self) -> None:
+        """Create directories under ~."""
+        if not self.user:
+            return
+        home = convert_path(os.path.expanduser("~"))
+        for path in only_strs(self.config_vars.values()):
+            if path.startswith(home) and not os.path.isdir(path):
+                self.debug_print(f"os.makedirs('{path}', 0o700)")
+                os.makedirs(path, 0o700)
+
+    INSTALL_SCHEMES = dict(
+        posix=dict(
+            install_dir='$base/lib/python$py_version_short/site-packages',
+            script_dir='$base/bin',
+        ),
+    )
+
+    DEFAULT_SCHEME = dict(
+        install_dir='$base/Lib/site-packages',
+        script_dir='$base/Scripts',
+    )
+
+    def _expand(self, *attrs):
+        config_vars = self.get_finalized_command('install').config_vars
+
+        if self.prefix:
+            # Set default install_dir/scripts from --prefix
+            config_vars = dict(config_vars)
+            config_vars['base'] = self.prefix
+            scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
+            for attr, val in scheme.items():
+                if getattr(self, attr, None) is None:
+                    setattr(self, attr, val)
+
+        from distutils.util import subst_vars
+
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                val = subst_vars(val, config_vars)
+                if os.name == 'posix':
+                    val = os.path.expanduser(val)
+                setattr(self, attr, val)
+
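+    # Editor's note (illustrative, hypothetical prefix): when --prefix=/opt/app
+    # is given on a POSIX system, _expand() fills any unset attributes from
+    # INSTALL_SCHEMES['posix'] with base=/opt/app, so install_dir expands to
+    # something like /opt/app/lib/python3.12/site-packages (via
+    # $py_version_short) and script_dir to /opt/app/bin.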
+
+def _pythonpath():
+    items = os.environ.get('PYTHONPATH', '').split(os.pathsep)
+    return filter(None, items)
+
+
+def get_site_dirs():
+    """
+    Return a list of 'site' dirs
+    """
+
+    sitedirs = []
+
+    # start with PYTHONPATH
+    sitedirs.extend(_pythonpath())
+
+    prefixes = [sys.prefix]
+    if sys.exec_prefix != sys.prefix:
+        prefixes.append(sys.exec_prefix)
+    for prefix in prefixes:
+        if not prefix:
+            continue
+
+        if sys.platform in ('os2emx', 'riscos'):
+            sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+        elif os.sep == '/':
+            sitedirs.extend([
+                os.path.join(
+                    prefix,
+                    "lib",
+                    f"python{sys.version_info.major}.{sys.version_info.minor}",
+                    "site-packages",
+                ),
+                os.path.join(prefix, "lib", "site-python"),
+            ])
+        else:
+            sitedirs.extend([
+                prefix,
+                os.path.join(prefix, "lib", "site-packages"),
+            ])
+        if sys.platform != 'darwin':
+            continue
+
+        # for framework builds *only* we add the standard Apple
+        # locations. Currently only per-user, but /Library and
+        # /Network/Library could be added too
+        if 'Python.framework' not in prefix:
+            continue
+
+        home = os.environ.get('HOME')
+        if not home:
+            continue
+
+        home_sp = os.path.join(
+            home,
+            'Library',
+            'Python',
+            f'{sys.version_info.major}.{sys.version_info.minor}',
+            'site-packages',
+        )
+        sitedirs.append(home_sp)
+    lib_paths = get_path('purelib'), get_path('platlib')
+
+    sitedirs.extend(s for s in lib_paths if s not in sitedirs)
+
+    if site.ENABLE_USER_SITE:
+        sitedirs.append(site.USER_SITE)
+
+    with contextlib.suppress(AttributeError):
+        sitedirs.extend(site.getsitepackages())
+
+    return list(map(normalize_path, sitedirs))
+
+
+def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
+    """Yield sys.path directories that might contain "old-style" packages"""
+
+    seen = set()
+
+    for dirname in inputs:
+        dirname = normalize_path(dirname)
+        if dirname in seen:
+            continue
+
+        seen.add(dirname)
+        if not os.path.isdir(dirname):
+            continue
+
+        files = os.listdir(dirname)
+        yield dirname, files
+
+        for name in files:
+            if not name.endswith('.pth'):
+                # We only care about the .pth files
+                continue
+            if name in ('easy-install.pth', 'setuptools.pth'):
+                # Ignore .pth files that we control
+                continue
+
+            # Read the .pth file
+            content = _read_pth(os.path.join(dirname, name))
+            lines = list(yield_lines(content))
+
+            # Yield existing non-dupe, non-import directory lines from it
+            for line in lines:
+                if line.startswith("import"):
+                    continue
+
+                line = normalize_path(line.rstrip())
+                if line in seen:
+                    continue
+
+                seen.add(line)
+                if not os.path.isdir(line):
+                    continue
+
+                yield line, os.listdir(line)
+
+
+def extract_wininst_cfg(dist_filename):
+    """Extract configuration data from a bdist_wininst .exe
+
+    Returns a configparser.RawConfigParser, or None
+    """
+    f = open(dist_filename, 'rb')
+    try:
+        endrec = zipfile._EndRecData(f)
+        if endrec is None:
+            return None
+
+        prepended = (endrec[9] - endrec[5]) - endrec[6]
+        if prepended < 12:  # no wininst data here
+            return None
+        f.seek(prepended - 12)
+
+        tag, cfglen, _bmlen = struct.unpack("<iii", f.read(12))
+        if tag not in (0x1234567A, 0x1234567B):
+            return None  # not a valid tag
+
+        f.seek(prepended - (12 + cfglen))
+        init = {'version': '', 'target_version': ''}
+        cfg = configparser.RawConfigParser(init)
+        try:
+            part = f.read(cfglen)
+            # Read up to the first null byte.
+            config = part.split(b'\0', 1)[0]
+            # Now the config is in bytes, but for RawConfigParser, it should
+            #  be text, so decode it.
+            config = config.decode(sys.getfilesystemencoding())
+            cfg.read_file(io.StringIO(config))
+        except configparser.Error:
+            return None
+        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
+            return None
+        return cfg
+
+    finally:
+        f.close()
+
+
+def get_exe_prefixes(exe_filename):
+    """Get exe->egg path translations for a given .exe file"""
+
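+    # bdist_wininst archives lay files out under top-level PURELIB/, PLATLIB/,
+    # SCRIPTS/ and DATA/ directories; map those prefixes onto the egg layout
+    # (scripts end up under EGG-INFO/scripts/, everything else at the egg root).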
+    prefixes = [
+        ('PURELIB/', ''),
+        ('PLATLIB/pywin32_system32', ''),
+        ('PLATLIB/', ''),
+        ('SCRIPTS/', 'EGG-INFO/scripts/'),
+        ('DATA/lib/site-packages', ''),
+    ]
+    z = zipfile.ZipFile(exe_filename)
+    try:
+        for info in z.infolist():
+            name = info.filename
+            parts = name.split('/')
+            if len(parts) == 3 and parts[2] == 'PKG-INFO':
+                if parts[1].endswith('.egg-info'):
+                    prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
+                    break
+            if len(parts) != 2 or not name.endswith('.pth'):
+                continue
+            if name.endswith('-nspkg.pth'):
+                continue
+            if parts[0].upper() in ('PURELIB', 'PLATLIB'):
+                contents = z.read(name).decode()
+                for pth in yield_lines(contents):
+                    pth = pth.strip().replace('\\', '/')
+                    if not pth.startswith('import'):
+                        prefixes.append(((f'{parts[0]}/{pth}/'), ''))
+    finally:
+        z.close()
+    prefixes = [(x.lower(), y) for x, y in prefixes]
+    prefixes.sort()
+    prefixes.reverse()
+    return prefixes
+
+
+class PthDistributions(Environment):
+    """A .pth file with Distribution paths in it"""
+
+    def __init__(self, filename, sitedirs=()) -> None:
+        self.filename = filename
+        self.sitedirs = list(map(normalize_path, sitedirs))
+        self.basedir = normalize_path(os.path.dirname(self.filename))
+        self.paths, self.dirty = self._load()
+        # keep a copy in case someone manually updates the paths attribute on the instance
+        self._init_paths = self.paths[:]
+        super().__init__([], None, None)
+        for path in yield_lines(self.paths):
+            list(map(self.add, find_distributions(path, True)))
+
+    def _load_raw(self):
+        paths = []
+        dirty = saw_import = False
+        seen = set(self.sitedirs)
+        content = _read_pth(self.filename)
+        for line in content.splitlines():
+            path = line.rstrip()
+            # still keep imports and empty/commented lines for formatting
+            paths.append(path)
+            if line.startswith(('import ', 'from ')):
+                saw_import = True
+                continue
+            stripped_path = path.strip()
+            if not stripped_path or stripped_path.startswith('#'):
+                continue
+            # skip non-existent paths, in case somebody deleted a package
+            # manually, and duplicate paths as well
+            normalized_path = normalize_path(os.path.join(self.basedir, path))
+            if normalized_path in seen or not os.path.exists(normalized_path):
+                log.debug("cleaned up dirty or duplicated %r", path)
+                dirty = True
+                paths.pop()
+                continue
+            seen.add(normalized_path)
+        # remove any trailing empty/blank line
+        while paths and not paths[-1].strip():
+            paths.pop()
+            dirty = True
+        return paths, dirty or (paths and saw_import)
+
+    def _load(self):
+        if os.path.isfile(self.filename):
+            return self._load_raw()
+        return [], False
+
+    def save(self) -> None:
+        """Write changed .pth file back to disk"""
+        # first reload the file
+        last_paths, last_dirty = self._load()
+        # and check that there are no differences from what we have.
+        # there can be differences if someone else has written to the file
+        # since we first loaded it.
+        # we don't want to lose any new paths that were added since then.
+        for path in last_paths[:]:
+            if path not in self.paths:
+                self.paths.append(path)
+                log.info("detected new path %r", path)
+                last_dirty = True
+            else:
+                last_paths.remove(path)
+        # also, re-check that all paths are still valid before saving them
+        for path in self.paths[:]:
+            if path not in last_paths and not path.startswith((
+                'import ',
+                'from ',
+                '#',
+            )):
+                absolute_path = os.path.join(self.basedir, path)
+                if not os.path.exists(absolute_path):
+                    self.paths.remove(path)
+                    log.info("removing now non-existent path %r", path)
+                    last_dirty = True
+
+        self.dirty |= last_dirty or self.paths != self._init_paths
+        if not self.dirty:
+            return
+
+        rel_paths = list(map(self.make_relative, self.paths))
+        if rel_paths:
+            log.debug("Saving %s", self.filename)
+            lines = self._wrap_lines(rel_paths)
+            data = '\n'.join(lines) + '\n'
+            if os.path.islink(self.filename):
+                os.unlink(self.filename)
+            with open(self.filename, 'wt', encoding=py312.PTH_ENCODING) as f:
+                # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
+                #     see python/cpython#77102.
+                f.write(data)
+        elif os.path.exists(self.filename):
+            log.debug("Deleting empty %s", self.filename)
+            os.unlink(self.filename)
+
+        self.dirty = False
+        self._init_paths[:] = self.paths[:]
+
+    @staticmethod
+    def _wrap_lines(lines):
+        return lines
+
+    def add(self, dist) -> None:
+        """Add `dist` to the distribution map"""
+        new_path = dist.location not in self.paths and (
+            dist.location not in self.sitedirs
+            or
+            # account for '.' being in PYTHONPATH
+            dist.location == os.getcwd()
+        )
+        if new_path:
+            self.paths.append(dist.location)
+            self.dirty = True
+        super().add(dist)
+
+    def remove(self, dist) -> None:
+        """Remove `dist` from the distribution map"""
+        while dist.location in self.paths:
+            self.paths.remove(dist.location)
+            self.dirty = True
+        super().remove(dist)
+
+    def make_relative(self, path):
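+        # Walk up from ``path``: if ``self.basedir`` (the directory holding the .pth
+        # file) is reached, return the path relative to it; otherwise return ``path``
+        # unchanged, since it lives outside the .pth file's directory tree.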
+        npath, last = os.path.split(normalize_path(path))
+        baselen = len(self.basedir)
+        parts = [last]
+        sep = '/' if os.altsep == '/' else os.sep
+        while len(npath) >= baselen:
+            if npath == self.basedir:
+                parts.append(os.curdir)
+                parts.reverse()
+                return sep.join(parts)
+            npath, last = os.path.split(npath)
+            parts.append(last)
+        else:
+            return path
+
+
+class RewritePthDistributions(PthDistributions):
+    @classmethod
+    def _wrap_lines(cls, lines):
+        yield cls.prelude
+        yield from lines
+        yield cls.postlude
+
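+    # The prelude records the length of sys.path before this .pth file's entries are
+    # processed; the postlude then moves every entry the file added to the position
+    # tracked in ``sys.__egginsert`` (the front of sys.path by default), so the
+    # managed paths take precedence over the rest of sys.path.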
+    prelude = _one_liner(
+        """
+        import sys
+        sys.__plen = len(sys.path)
+        """
+    )
+    postlude = _one_liner(
+        """
+        import sys
+        new = sys.path[sys.__plen:]
+        del sys.path[sys.__plen:]
+        p = getattr(sys, '__egginsert', 0)
+        sys.path[p:p] = new
+        sys.__egginsert = p + len(new)
+        """
+    )
+
+
+if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite':
+    PthDistributions = RewritePthDistributions  # type: ignore[misc]  # Overwriting type
+
+
+def _first_line_re():
+    """
+    Return a regular expression based on first_line_re suitable for matching
+    strings.
+    """
+    if isinstance(first_line_re.pattern, str):
+        return first_line_re
+
+    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
+    return re.compile(first_line_re.pattern.decode())
+
+
+def update_dist_caches(dist_path, fix_zipimporter_caches):
+    """
+    Fix any globally cached `dist_path` related data
+
+    `dist_path` should be a path of a newly installed egg distribution (zipped
+    or unzipped).
+
+    sys.path_importer_cache contains finder objects that have been cached when
+    importing data from the original distribution. Any such finders need to be
+    cleared since the replacement distribution might be packaged differently,
+    e.g. a zipped egg distribution might get replaced with an unzipped egg
+    folder or vice versa. Having the old finders cached may then cause Python
+    to attempt loading modules from the replacement distribution using an
+    incorrect loader.
+
+    zipimport.zipimporter objects are Python loaders charged with importing
+    data packaged inside zip archives. If stale loaders referencing the
+    original distribution are left behind, they can fail to load modules from
+    the replacement distribution. E.g. if an old zipimport.zipimporter instance
+    is used to load data from a new zipped egg archive, it may cause the
+    operation to attempt to locate the requested data in the wrong location -
+    one indicated by the original distribution's zip archive directory
+    information. Such an operation may then fail outright, e.g. report having
+    read a 'bad local file header', or even worse, it may fail silently &
+    return invalid data.
+
+    zipimport._zip_directory_cache contains cached zip archive directory
+    information for all existing zipimport.zipimporter instances and all such
+    instances connected to the same archive share the same cached directory
+    information.
+
+    If asked, and the underlying Python implementation allows it, we can fix
+    all existing zipimport.zipimporter instances instead of having to track
+    them down and remove them one by one, by updating their shared cached zip
+    archive directory information. This, of course, assumes that the
+    replacement distribution is packaged as a zipped egg.
+
+    If not asked to fix existing zipimport.zipimporter instances, we still do
+    our best to clear any remaining zipimport.zipimporter related cached data
+    that might somehow later get used when attempting to load data from the new
+    distribution and thus cause such load operations to fail. Note that when
+    tracking down such remaining stale data, we cannot catch every conceivable
+    usage from here, and we clear only those that we know of and have found to
+    cause problems if left alive. Any remaining caches should be updated by
+    whoever is in charge of maintaining them, i.e. they should be ready to
+    handle us replacing their zip archives with new distributions at runtime.
+
+    """
+    # There are several other known sources of stale zipimport.zipimporter
+    # instances that we do not clear here, but might if ever given a reason to
+    # do so:
+    # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
+    #   set') may contain distributions which may in turn contain their
+    #   zipimport.zipimporter loaders.
+    # * Several zipimport.zipimporter loaders held by local variables further
+    #   up the function call stack when running the setuptools installation.
+    # * Already loaded modules may have their __loader__ attribute set to the
+    #   exact loader instance used when importing them. Python 3.4 docs state
+    #   that this information is intended mostly for introspection and so is
+    #   not expected to cause us problems.
+    normalized_path = normalize_path(dist_path)
+    _uncache(normalized_path, sys.path_importer_cache)
+    if fix_zipimporter_caches:
+        _replace_zip_directory_cache_data(normalized_path)
+    else:
+        # Here, even though we do not want to fix existing and now stale
+        # zipimporter cache information, we still want to remove it. Related to
+        # Python's zip archive directory information cache, we clear each of
+        # its stale entries in two phases:
+        #   1. Clear the entry so attempting to access zip archive information
+        #      via any existing stale zipimport.zipimporter instances fails.
+        #   2. Remove the entry from the cache so any newly constructed
+        #      zipimport.zipimporter instances do not end up using old stale
+        #      zip archive directory information.
+        # This whole stale data removal step does not seem strictly necessary,
+        # but has been left in because it was done before we started replacing
+        # the zip archive directory information cache content if possible, and
+        # there are no relevant unit tests that we can depend on to tell us if
+        # this is really needed.
+        _remove_and_clear_zip_directory_cache_data(normalized_path)
+
+
+def _collect_zipimporter_cache_entries(normalized_path, cache):
+    """
+    Return zipimporter cache entry keys related to a given normalized path.
+
+    Alternative path spellings (e.g. those using different character case or
+    those using alternative path separators) related to the same path are
+    included. Any sub-path entries are included as well, i.e. those
+    corresponding to zip archives embedded in other zip archives.
+
+    """
+    result = []
+    prefix_len = len(normalized_path)
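+    # A cache key matches if it equals ``normalized_path`` or is a sub-path of it
+    # (the character right after the prefix is a separator or the string ends there).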
+    for p in cache:
+        np = normalize_path(p)
+        if np.startswith(normalized_path) and np[prefix_len : prefix_len + 1] in (
+            os.sep,
+            '',
+        ):
+            result.append(p)
+    return result
+
+
+def _update_zipimporter_cache(normalized_path, cache, updater=None):
+    """
+    Update zipimporter cache data for a given normalized path.
+
+    Any sub-path entries are processed as well, i.e. those corresponding to zip
+    archives embedded in other zip archives.
+
+    Given updater is a callable taking a cache entry key and the original entry
+    (after already removing the entry from the cache), and expected to update
+    the entry and possibly return a new one to be inserted in its place.
+    Returning None indicates that the entry should not be replaced with a new
+    one. If no updater is given, the cache entries are simply removed without
+    any additional processing, the same as if the updater simply returned None.
+
+    """
+    for p in _collect_zipimporter_cache_entries(normalized_path, cache):
+        # N.B. pypy's custom zipimport._zip_directory_cache implementation does
+        # not support the complete dict interface:
+        # * Does not support item assignment, so on PyPy this function can only be
+        #   used to remove existing cache entries, not to replace them.
+        # * Does not support the dict.pop() method, forcing us to use the
+        #   get/del pattern instead. For more detailed information see the
+        #   following links:
+        #      https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
+        #      https://foss.heptapod.net/pypy/pypy/-/blob/144c4e65cb6accb8e592f3a7584ea38265d1873c/pypy/module/zipimport/interp_zipimport.py
+        old_entry = cache[p]
+        del cache[p]
+        new_entry = updater and updater(p, old_entry)
+        if new_entry is not None:
+            cache[p] = new_entry
+
+
+def _uncache(normalized_path, cache):
+    _update_zipimporter_cache(normalized_path, cache)
+
+
+def _remove_and_clear_zip_directory_cache_data(normalized_path):
+    def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
+        old_entry.clear()
+
+    _update_zipimporter_cache(
+        normalized_path,
+        zipimport._zip_directory_cache,
+        updater=clear_and_remove_cached_zip_archive_directory_data,
+    )
+
+
+# PyPy Python implementation does not allow directly writing to the
+# zipimport._zip_directory_cache and so prevents us from attempting to correct
+# its content. The best we can do there is clear the problematic cache content
+# and have PyPy repopulate it as needed. The downside is that if there are any
+# stale zipimport.zipimporter instances laying around, attempting to use them
+# will fail due to not having its zip archive directory information available
+# instead of being automatically corrected to use the new correct zip archive
+# directory information.
+if '__pypy__' in sys.builtin_module_names:
+    _replace_zip_directory_cache_data = _remove_and_clear_zip_directory_cache_data
+else:
+
+    def _replace_zip_directory_cache_data(normalized_path):
+        def replace_cached_zip_archive_directory_data(path, old_entry):
+            # N.B. In theory, we could load the zip directory information just
+            # once for all updated path spellings, and then copy it locally and
+            # update its contained path strings to contain the correct
+            # spelling, but that seems like a way too invasive move (this cache
+            # structure is not officially documented anywhere and could in
+            # theory change with new Python releases) for no significant
+            # benefit.
+            old_entry.clear()
+            zipimport.zipimporter(path)
+            old_entry.update(zipimport._zip_directory_cache[path])
+            return old_entry
+
+        _update_zipimporter_cache(
+            normalized_path,
+            zipimport._zip_directory_cache,
+            updater=replace_cached_zip_archive_directory_data,
+        )
+
+
+def is_python(text, filename='<string>'):
+    "Is this string a valid Python script?"
+    try:
+        compile(text, filename, 'exec')
+    except (SyntaxError, TypeError):
+        return False
+    else:
+        return True
+
+
+def is_sh(executable):
+    """Determine if the specified executable is a .sh (contains a #! line)"""
+    try:
+        with open(executable, encoding='latin-1') as fp:
+            magic = fp.read(2)
+    except OSError:
+        return executable
+    return magic == '#!'
+
+
+def nt_quote_arg(arg):
+    """Quote a command line argument according to Windows parsing rules"""
+    return subprocess.list2cmdline([arg])
+
+
+def is_python_script(script_text, filename):
+    """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc."""
+    if filename.endswith('.py') or filename.endswith('.pyw'):
+        return True  # extension says it's Python
+    if is_python(script_text, filename):
+        return True  # it's syntactically valid Python
+    if script_text.startswith('#!'):
+        # It begins with a '#!' line, so check if 'python' is in it somewhere
+        return 'python' in script_text.splitlines()[0].lower()
+
+    return False  # Not any Python I can recognize
+
+
+class _SplitArgs(TypedDict, total=False):
+    comments: bool
+    posix: bool
+
+
+class CommandSpec(list):
+    """
+    A command spec for a #! header, specified as a list of arguments akin to
+    those passed to Popen.
+    """
+
+    options: list[str] = []
+    split_args = _SplitArgs()
+
+    @classmethod
+    def best(cls):
+        """
+        Choose the best CommandSpec class based on environmental conditions.
+        """
+        return cls
+
+    @classmethod
+    def _sys_executable(cls):
+        _default = os.path.normpath(sys.executable)
+        return os.environ.get('__PYVENV_LAUNCHER__', _default)
+
+    @classmethod
+    def from_param(cls, param: Self | str | Iterable[str] | None) -> Self:
+        """
+        Construct a CommandSpec from a parameter to build_scripts, which may
+        be None.
+        """
+        if isinstance(param, cls):
+            return param
+        if isinstance(param, str):
+            return cls.from_string(param)
+        if isinstance(param, Iterable):
+            return cls(param)
+        if param is None:
+            return cls.from_environment()
+        raise TypeError(f"Argument has an unsupported type {type(param)}")
+
+    @classmethod
+    def from_environment(cls):
+        return cls([cls._sys_executable()])
+
+    @classmethod
+    def from_string(cls, string: str) -> Self:
+        """
+        Construct a command spec from a simple string representing a command
+        line parseable by shlex.split.
+        """
+        items = shlex.split(string, **cls.split_args)
+        return cls(items)
+
+    def install_options(self, script_text: str):
+        self.options = shlex.split(self._extract_options(script_text))
+        cmdline = subprocess.list2cmdline(self)
+        if not isascii(cmdline):
+            self.options[:0] = ['-x']
+
+    @staticmethod
+    def _extract_options(orig_script):
+        """
+        Extract any options from the first line of the script.
+        """
+        first = (orig_script + '\n').splitlines()[0]
+        match = _first_line_re().match(first)
+        options = match.group(1) or '' if match else ''
+        return options.strip()
+
+    def as_header(self):
+        return self._render(self + list(self.options))
+
+    @staticmethod
+    def _strip_quotes(item):
+        _QUOTES = '"\''
+        for q in _QUOTES:
+            if item.startswith(q) and item.endswith(q):
+                return item[1:-1]
+        return item
+
+    @staticmethod
+    def _render(items):
+        cmdline = subprocess.list2cmdline(
+            CommandSpec._strip_quotes(item.strip()) for item in items
+        )
+        return '#!' + cmdline + '\n'
+
+
+# For pbr compat; will be removed in a future version.
+sys_executable = CommandSpec._sys_executable()
+
+
+class WindowsCommandSpec(CommandSpec):
+    split_args = _SplitArgs(posix=False)
+
+
+class ScriptWriter:
+    """
+    Encapsulates behavior around writing entry point scripts for console and
+    gui apps.
+    """
+
+    template = textwrap.dedent(
+        r"""
+        # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
+        import re
+        import sys
+
+        # for compatibility with easy_install; see #2198
+        __requires__ = %(spec)r
+
+        try:
+            from importlib.metadata import distribution
+        except ImportError:
+            try:
+                from importlib_metadata import distribution
+            except ImportError:
+                from pkg_resources import load_entry_point
+
+
+        def importlib_load_entry_point(spec, group, name):
+            dist_name, _, _ = spec.partition('==')
+            matches = (
+                entry_point
+                for entry_point in distribution(dist_name).entry_points
+                if entry_point.group == group and entry_point.name == name
+            )
+            return next(matches).load()
+
+
+        globals().setdefault('load_entry_point', importlib_load_entry_point)
+
+
+        if __name__ == '__main__':
+            sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+            sys.exit(load_entry_point(%(spec)r, %(group)r, %(name)r)())
+        """
+    ).lstrip()
+
+    command_spec_class = CommandSpec
+
+    @classmethod
+    def get_args(cls, dist, header=None):
+        """
+        Yield write_script() argument tuples for a distribution's
+        console_scripts and gui_scripts entry points.
+        """
+        if header is None:
+            header = cls.get_header()
+        spec = str(dist.as_requirement())
+        for type_ in 'console', 'gui':
+            group = type_ + '_scripts'
+            for name in dist.get_entry_map(group).keys():
+                cls._ensure_safe_name(name)
+                script_text = cls.template % locals()
+                args = cls._get_script_args(type_, name, header, script_text)
+                yield from args
+
+    @staticmethod
+    def _ensure_safe_name(name):
+        """
+        Prevent paths in *_scripts entry point names.
+        """
+        has_path_sep = re.search(r'[\\/]', name)
+        if has_path_sep:
+            raise ValueError("Path separators not allowed in script names")
+
+    @classmethod
+    def best(cls):
+        """
+        Select the best ScriptWriter for this environment.
+        """
+        if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
+            return WindowsScriptWriter.best()
+        else:
+            return cls
+
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        # Simply write the stub with no extension.
+        yield (name, header + script_text)
+
+    @classmethod
+    def get_header(
+        cls,
+        script_text: str = "",
+        executable: str | CommandSpec | Iterable[str] | None = None,
+    ) -> str:
+        """Create a #! line, getting options (if any) from script_text"""
+        cmd = cls.command_spec_class.best().from_param(executable)
+        cmd.install_options(script_text)
+        return cmd.as_header()
+
+
+class WindowsScriptWriter(ScriptWriter):
+    command_spec_class = WindowsCommandSpec
+
+    @classmethod
+    def best(cls):
+        """
+        Select the best ScriptWriter suitable for Windows
+        """
+        writer_lookup = dict(
+            executable=WindowsExecutableLauncherWriter,
+            natural=cls,
+        )
+        # for compatibility, use the executable launcher by default
+        launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
+        return writer_lookup[launcher]
+
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        "For Windows, add a .py extension"
+        ext = dict(console='.pya', gui='.pyw')[type_]
+        if ext not in os.environ['PATHEXT'].lower().split(';'):
+            msg = (
+                "{ext} not listed in PATHEXT; scripts will not be "
+                "recognized as executables."
+            ).format(**locals())
+            SetuptoolsWarning.emit(msg)
+        old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
+        old.remove(ext)
+        header = cls._adjust_header(type_, header)
+        blockers = [name + x for x in old]
+        yield name + ext, header + script_text, 't', blockers
+
+    @classmethod
+    def _adjust_header(cls, type_, orig_header):
+        """
+        Make sure 'pythonw' is used for gui and 'python' is used for
+        console (regardless of what sys.executable is).
+        """
+        pattern = 'pythonw.exe'
+        repl = 'python.exe'
+        if type_ == 'gui':
+            pattern, repl = repl, pattern
+        pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
+        new_header = pattern_ob.sub(string=orig_header, repl=repl)
+        return new_header if cls._use_header(new_header) else orig_header
+
+    @staticmethod
+    def _use_header(new_header):
+        """
+        Should _adjust_header use the replaced header?
+
+        On non-windows systems, always use. On
+        Windows systems, only use the replaced header if it resolves
+        to an executable on the system.
+        """
+        clean_header = new_header[2:-1].strip('"')
+        return sys.platform != 'win32' or shutil.which(clean_header)
+
+
+class WindowsExecutableLauncherWriter(WindowsScriptWriter):
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        """
+        For Windows, add a .py extension and an .exe launcher
+        """
+        if type_ == 'gui':
+            launcher_type = 'gui'
+            ext = '-script.pyw'
+            old = ['.pyw']
+        else:
+            launcher_type = 'cli'
+            ext = '-script.py'
+            old = ['.py', '.pyc', '.pyo']
+        hdr = cls._adjust_header(type_, header)
+        blockers = [name + x for x in old]
+        yield (name + ext, hdr + script_text, 't', blockers)
+        yield (
+            name + '.exe',
+            get_win_launcher(launcher_type),
+            'b',  # write in binary mode
+        )
+        if not is_64bit():
+            # install a manifest for the launcher to prevent Windows
+            # from detecting it as an installer (which it will for
+            #  launchers like easy_install.exe). Consider only
+            #  adding a manifest for launchers detected as installers.
+            #  See Distribute #143 for details.
+            m_name = name + '.exe.manifest'
+            yield (m_name, load_launcher_manifest(name), 't')
+
+
+def get_win_launcher(type):
+    """
+    Load the Windows launcher (executable) suitable for launching a script.
+
+    `type` should be either 'cli' or 'gui'
+
+    Returns the executable as a byte string.
+    """
+    launcher_fn = f'{type}.exe'
+    if is_64bit():
+        if get_platform() == "win-arm64":
+            launcher_fn = launcher_fn.replace(".", "-arm64.")
+        else:
+            launcher_fn = launcher_fn.replace(".", "-64.")
+    else:
+        launcher_fn = launcher_fn.replace(".", "-32.")
+    return resource_string('setuptools', launcher_fn)
+
+
+def load_launcher_manifest(name):
+    manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
+    return manifest.decode('utf-8') % vars()
+
+
+def current_umask():
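+    # os.umask() sets a new mask and returns the previous one, so set a throwaway
+    # value to read the current mask and immediately restore it.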
+    tmp = os.umask(0o022)
+    os.umask(tmp)
+    return tmp
+
+
+def only_strs(values):
+    """
+    Exclude non-str values. Ref #3063.
+    """
+    return filter(lambda val: isinstance(val, str), values)
+
+
+def _read_pth(fullname: str) -> str:
+    # Python<3.13 requires encoding="locale" instead of "utf-8", see python/cpython#77102.
+    # Old versions of setuptools may have produced `pth` files with different
+    # encodings, which could be problematic... so we fall back to "locale".
+
+    try:
+        with open(fullname, encoding=py312.PTH_ENCODING) as f:
+            return f.read()
+    except UnicodeDecodeError:  # pragma: no cover
+        # This error may only happen for Python >= 3.13
+        # TODO: Possible deprecation warnings to be added in the future:
+        #       ``.pth file {fullname!r} is not UTF-8.``
+        #       Your environment contains {fullname!r}, which cannot be read as UTF-8.
+        #       This is likely to have been produced with an old version of setuptools.
+        #       Please be mindful that this is deprecated and in the future, non-utf8
+        #       .pth files may cause setuptools to fail.
+        with open(fullname, encoding=py39.LOCALE_ENCODING) as f:
+            return f.read()
+
+
+class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
+    _SUMMARY = "easy_install command is deprecated."
+    _DETAILS = """
+    Please avoid running ``setup.py`` and ``easy_install``.
+    Instead, use pypa/build, pypa/installer or other
+    standards-based tools.
+    """
+    _SEE_URL = "https://github.com/pypa/setuptools/issues/917"
+    # _DUE_DATE not defined yet
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/editable_wheel.py b/.venv/lib/python3.12/site-packages/setuptools/command/editable_wheel.py
new file mode 100644
index 00000000..1a544ec2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/editable_wheel.py
@@ -0,0 +1,925 @@
+"""
+Create a wheel that, when installed, will make the source package 'editable'
+(add it to the interpreter's path, including metadata) per PEP 660. Replaces
+'setup.py develop'.
+
+.. note::
+   One of the mechanisms briefly mentioned in PEP 660 to implement editable installs is
+   to create a separate directory inside ``build`` and use a .pth file to point to that
+   directory. In the context of this file, such a directory is referred to as the
+   *auxiliary build directory* or ``auxiliary_dir``.
+"""
+
+from __future__ import annotations
+
+import io
+import logging
+import os
+import shutil
+import traceback
+from collections.abc import Iterable, Iterator, Mapping
+from contextlib import suppress
+from enum import Enum
+from inspect import cleandoc
+from itertools import chain, starmap
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from types import TracebackType
+from typing import TYPE_CHECKING, Protocol, TypeVar, cast
+
+from .. import Command, _normalization, _path, _shutil, errors, namespaces
+from .._path import StrPath
+from ..compat import py312
+from ..discovery import find_package_path
+from ..dist import Distribution
+from ..warnings import InformationOnly, SetuptoolsDeprecationWarning, SetuptoolsWarning
+from .build import build as build_cls
+from .build_py import build_py as build_py_cls
+from .dist_info import dist_info as dist_info_cls
+from .egg_info import egg_info as egg_info_cls
+from .install import install as install_cls
+from .install_scripts import install_scripts as install_scripts_cls
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+    from .._vendor.wheel.wheelfile import WheelFile
+
+_P = TypeVar("_P", bound=StrPath)
+_logger = logging.getLogger(__name__)
+
+
+class _EditableMode(Enum):
+    """
+    Possible editable installation modes:
+    `lenient` (new files automatically added to the package - DEFAULT);
+    `strict` (requires a new installation when files are added/removed); or
+    `compat` (attempts to emulate `python setup.py develop` - DEPRECATED).
+    """
+
+    STRICT = "strict"
+    LENIENT = "lenient"
+    COMPAT = "compat"  # TODO: Remove `compat` after Dec/2022.
+
+    @classmethod
+    def convert(cls, mode: str | None) -> _EditableMode:
+        if not mode:
+            return _EditableMode.LENIENT  # default
+
+        _mode = mode.upper()
+        if _mode not in _EditableMode.__members__:
+            raise errors.OptionError(f"Invalid editable mode: {mode!r}. Try: 'strict'.")
+
+        if _mode == "COMPAT":
+            SetuptoolsDeprecationWarning.emit(
+                "Compat editable installs",
+                """
+                The 'compat' editable mode is transitional and will be removed
+                in future versions of `setuptools`.
+                Please adapt your code to use either the 'strict' or the
+                'lenient' mode.
+                """,
+                see_docs="userguide/development_mode.html",
+                # TODO: define due_date
+                # There is a series of shortcomings with the available editable install
+                # methods, and they are very controversial. This is something that still
+                # needs work.
+                # Moreover, `pip` is still hiding this warning, so users are not aware.
+            )
+
+        return _EditableMode[_mode]
+
+
+_STRICT_WARNING = """
+New or renamed files may not be automatically picked up without a new installation.
+"""
+
+_LENIENT_WARNING = """
+Options like `package-data`, `include/exclude-package-data` or
+`packages.find.exclude/include` may have no effect.
+"""
+
+
+class editable_wheel(Command):
+    """Build 'editable' wheel for development.
+    This command is private and reserved for internal use of setuptools,
+    users should rely on ``setuptools.build_meta`` APIs.
+    """
+
+    description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create PEP 660 editable wheel"
+
+    user_options = [
+        ("dist-dir=", "d", "directory to put final built distributions in"),
+        ("dist-info-dir=", "I", "path to a pre-build .dist-info directory"),
+        ("mode=", None, cleandoc(_EditableMode.__doc__ or "")),
+    ]
+
+    def initialize_options(self):
+        self.dist_dir = None
+        self.dist_info_dir = None
+        self.project_dir = None
+        self.mode = None
+
+    def finalize_options(self) -> None:
+        dist = self.distribution
+        self.project_dir = dist.src_root or os.curdir
+        self.package_dir = dist.package_dir or {}
+        self.dist_dir = Path(self.dist_dir or os.path.join(self.project_dir, "dist"))
+
+    def run(self) -> None:
+        try:
+            self.dist_dir.mkdir(exist_ok=True)
+            self._ensure_dist_info()
+
+            # Add missing dist_info files
+            self.reinitialize_command("bdist_wheel")
+            bdist_wheel = self.get_finalized_command("bdist_wheel")
+            bdist_wheel.write_wheelfile(self.dist_info_dir)
+
+            self._create_wheel_file(bdist_wheel)
+        except Exception:
+            traceback.print_exc()
+            project = self.distribution.name or self.distribution.get_name()
+            _DebuggingTips.emit(project=project)
+            raise
+
+    def _ensure_dist_info(self):
+        if self.dist_info_dir is None:
+            dist_info = cast(dist_info_cls, self.reinitialize_command("dist_info"))
+            dist_info.output_dir = self.dist_dir
+            dist_info.ensure_finalized()
+            dist_info.run()
+            self.dist_info_dir = dist_info.dist_info_dir
+        else:
+            assert str(self.dist_info_dir).endswith(".dist-info")
+            assert Path(self.dist_info_dir, "METADATA").exists()
+
+    def _install_namespaces(self, installation_dir, pth_prefix):
+        # XXX: Only required to support the deprecated namespace practice
+        dist = self.distribution
+        if not dist.namespace_packages:
+            return
+
+        src_root = Path(self.project_dir, self.package_dir.get("", ".")).resolve()
+        installer = _NamespaceInstaller(dist, installation_dir, pth_prefix, src_root)
+        installer.install_namespaces()
+
+    def _find_egg_info_dir(self) -> str | None:
+        parent_dir = Path(self.dist_info_dir).parent if self.dist_info_dir else Path()
+        candidates = map(str, parent_dir.glob("*.egg-info"))
+        return next(candidates, None)
+
+    def _configure_build(
+        self, name: str, unpacked_wheel: StrPath, build_lib: StrPath, tmp_dir: StrPath
+    ):
+        """Configure commands to behave in the following ways:
+
+        - Build commands can write to ``build_lib`` if they really want to...
+          (but this folder is expected to be ignored and modules are expected to live
+          in the project directory...)
+        - Binary extensions should be built in-place (editable_mode = True)
+        - Data/header/script files are not part of the "editable" specification
+          so they are written directly to the unpacked_wheel directory.
+        """
+        # Non-editable files (data, headers, scripts) are written directly to the
+        # unpacked_wheel
+
+        dist = self.distribution
+        wheel = str(unpacked_wheel)
+        build_lib = str(build_lib)
+        data = str(Path(unpacked_wheel, f"{name}.data", "data"))
+        headers = str(Path(unpacked_wheel, f"{name}.data", "headers"))
+        scripts = str(Path(unpacked_wheel, f"{name}.data", "scripts"))
+
+        # egg-info may be generated again to create a manifest (used for package data)
+        egg_info = cast(
+            egg_info_cls, dist.reinitialize_command("egg_info", reinit_subcommands=True)
+        )
+        egg_info.egg_base = str(tmp_dir)
+        egg_info.ignore_egg_info_in_manifest = True
+
+        build = cast(
+            build_cls, dist.reinitialize_command("build", reinit_subcommands=True)
+        )
+        install = cast(
+            install_cls, dist.reinitialize_command("install", reinit_subcommands=True)
+        )
+
+        build.build_platlib = build.build_purelib = build.build_lib = build_lib
+        install.install_purelib = install.install_platlib = install.install_lib = wheel
+        install.install_scripts = build.build_scripts = scripts
+        install.install_headers = headers
+        install.install_data = data
+
+        install_scripts = cast(
+            install_scripts_cls, dist.get_command_obj("install_scripts")
+        )
+        install_scripts.no_ep = True
+
+        build.build_temp = str(tmp_dir)
+
+        build_py = cast(build_py_cls, dist.get_command_obj("build_py"))
+        build_py.compile = False
+        build_py.existing_egg_info_dir = self._find_egg_info_dir()
+
+        self._set_editable_mode()
+
+        build.ensure_finalized()
+        install.ensure_finalized()
+
+    def _set_editable_mode(self):
+        """Set the ``editable_mode`` flag in the build sub-commands"""
+        dist = self.distribution
+        build = dist.get_command_obj("build")
+        for cmd_name in build.get_sub_commands():
+            cmd = dist.get_command_obj(cmd_name)
+            if hasattr(cmd, "editable_mode"):
+                cmd.editable_mode = True
+            elif hasattr(cmd, "inplace"):
+                cmd.inplace = True  # backward compatibility with distutils
+
+    def _collect_build_outputs(self) -> tuple[list[str], dict[str, str]]:
+        files: list[str] = []
+        mapping: dict[str, str] = {}
+        build = self.get_finalized_command("build")
+
+        for cmd_name in build.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            if hasattr(cmd, "get_outputs"):
+                files.extend(cmd.get_outputs() or [])
+            if hasattr(cmd, "get_output_mapping"):
+                mapping.update(cmd.get_output_mapping() or {})
+
+        return files, mapping
+
+    def _run_build_commands(
+        self,
+        dist_name: str,
+        unpacked_wheel: StrPath,
+        build_lib: StrPath,
+        tmp_dir: StrPath,
+    ) -> tuple[list[str], dict[str, str]]:
+        self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir)
+        self._run_build_subcommands()
+        files, mapping = self._collect_build_outputs()
+        self._run_install("headers")
+        self._run_install("scripts")
+        self._run_install("data")
+        return files, mapping
+
+    def _run_build_subcommands(self) -> None:
+        """
+        Issue #3501 indicates that some plugins/customizations might rely on:
+
+        1. ``build_py`` not running
+        2. ``build_py`` always copying files to ``build_lib``
+
+        However both these assumptions may be false in editable_wheel.
+        This method implements a temporary workaround to support the ecosystem
+        while the implementations catch up.
+        """
+        # TODO: Once plugins/customizations had the chance to catch up, replace
+        #       `self._run_build_subcommands()` with `self.run_command("build")`.
+        #       Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023.
+        build = self.get_finalized_command("build")
+        for name in build.get_sub_commands():
+            cmd = self.get_finalized_command(name)
+            if name == "build_py" and type(cmd) is not build_py_cls:
+                self._safely_run(name)
+            else:
+                self.run_command(name)
+
+    def _safely_run(self, cmd_name: str):
+        try:
+            return self.run_command(cmd_name)
+        except Exception:
+            SetuptoolsDeprecationWarning.emit(
+                "Customization incompatible with editable install",
+                f"""
+                {traceback.format_exc()}
+
+                If you are seeing this warning it is very likely that a setuptools
+                plugin or customization overrides the `{cmd_name}` command, without
+                taking into consideration how editable installs run build steps
+                starting from setuptools v64.0.0.
+
+                Plugin authors and developers relying on custom build steps are
+                encouraged to update their `{cmd_name}` implementation considering the
+                information about editable installs in
+                https://setuptools.pypa.io/en/latest/userguide/extension.html.
+
+                For the time being `setuptools` will silence this error and ignore
+                the faulty command, but this behavior will change in future versions.
+                """,
+                # TODO: define due_date
+                # There is a series of shortcomings with the available editable install
+                # methods, and they are very controversial. This is something that still
+                # needs work.
+            )
+
+    def _create_wheel_file(self, bdist_wheel):
+        from wheel.wheelfile import WheelFile
+
+        dist_info = self.get_finalized_command("dist_info")
+        dist_name = dist_info.name
+        tag = "-".join(bdist_wheel.get_tag())
+        build_tag = "0.editable"  # According to PEP 427 needs to start with digit
+        archive_name = f"{dist_name}-{build_tag}-{tag}.whl"
+        wheel_path = Path(self.dist_dir, archive_name)
+        if wheel_path.exists():
+            wheel_path.unlink()
+
+        unpacked_wheel = TemporaryDirectory(suffix=archive_name)
+        build_lib = TemporaryDirectory(suffix=".build-lib")
+        build_tmp = TemporaryDirectory(suffix=".build-temp")
+
+        with unpacked_wheel as unpacked, build_lib as lib, build_tmp as tmp:
+            unpacked_dist_info = Path(unpacked, Path(self.dist_info_dir).name)
+            shutil.copytree(self.dist_info_dir, unpacked_dist_info)
+            self._install_namespaces(unpacked, dist_name)
+            files, mapping = self._run_build_commands(dist_name, unpacked, lib, tmp)
+            strategy = self._select_strategy(dist_name, tag, lib)
+            with strategy, WheelFile(wheel_path, "w") as wheel_obj:
+                strategy(wheel_obj, files, mapping)
+                wheel_obj.write_files(unpacked)
+
+        return wheel_path
+
+    def _run_install(self, category: str):
+        has_category = getattr(self.distribution, f"has_{category}", None)
+        if has_category and has_category():
+            _logger.info(f"Installing {category} as non editable")
+            self.run_command(f"install_{category}")
+
+    def _select_strategy(
+        self,
+        name: str,
+        tag: str,
+        build_lib: StrPath,
+    ) -> EditableStrategy:
+        """Decides which strategy to use to implement an editable installation."""
+        build_name = f"__editable__.{name}-{tag}"
+        project_dir = Path(self.project_dir)
+        mode = _EditableMode.convert(self.mode)
+
+        if mode is _EditableMode.STRICT:
+            auxiliary_dir = _empty_dir(Path(self.project_dir, "build", build_name))
+            return _LinkTree(self.distribution, name, auxiliary_dir, build_lib)
+
+        packages = _find_packages(self.distribution)
+        has_simple_layout = _simple_layout(packages, self.package_dir, project_dir)
+        is_compat_mode = mode is _EditableMode.COMPAT
+        if (set(self.package_dir) == {""} and has_simple_layout) or is_compat_mode:
+            # src-layout(ish) is relatively safe for a simple pth file
+            src_dir = self.package_dir.get("", ".")
+            return _StaticPth(self.distribution, name, [Path(project_dir, src_dir)])
+
+        # Use a MetaPathFinder to avoid adding accidental top-level packages/modules
+        return _TopLevelFinder(self.distribution, name)
+
+
+class EditableStrategy(Protocol):
+    def __call__(
+        self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]
+    ) -> object: ...
+    def __enter__(self) -> Self: ...
+    def __exit__(
+        self,
+        _exc_type: type[BaseException] | None,
+        _exc_value: BaseException | None,
+        _traceback: TracebackType | None,
+    ) -> object: ...
+
+
+class _StaticPth:
+    def __init__(self, dist: Distribution, name: str, path_entries: list[Path]) -> None:
+        self.dist = dist
+        self.name = name
+        self.path_entries = path_entries
+
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
+        entries = "\n".join(str(p.resolve()) for p in self.path_entries)
+        contents = _encode_pth(f"{entries}\n")
+        wheel.writestr(f"__editable__.{self.name}.pth", contents)
+
+    def __enter__(self) -> Self:
+        msg = f"""
+        Editable install will be performed using a .pth file to extend `sys.path` with:
+        {list(map(os.fspath, self.path_entries))!r}
+        """
+        _logger.warning(msg + _LENIENT_WARNING)
+        return self
+
+    def __exit__(
+        self,
+        _exc_type: object,
+        _exc_value: object,
+        _traceback: object,
+    ) -> None:
+        pass
+
+
+class _LinkTree(_StaticPth):
+    """
+    Creates a ``.pth`` file that points to a link tree in the ``auxiliary_dir``.
+
+    This strategy will only link files (not dirs), so it can be implemented on
+    any OS, even if that means using hardlinks instead of symlinks.
+
+    By co-locating ``auxiliary_dir`` and the original source code, limitations
+    with hardlinks should be avoided.
+    """
+
+    def __init__(
+        self,
+        dist: Distribution,
+        name: str,
+        auxiliary_dir: StrPath,
+        build_lib: StrPath,
+    ) -> None:
+        self.auxiliary_dir = Path(auxiliary_dir)
+        self.build_lib = Path(build_lib).resolve()
+        self._file = dist.get_command_obj("build_py").copy_file
+        super().__init__(dist, name, [self.auxiliary_dir])
+
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
+        self._create_links(files, mapping)
+        super().__call__(wheel, files, mapping)
+
+    def _normalize_output(self, file: str) -> str | None:
+        # Files outside build_lib are normalized to None; files under it become
+        # posix-style paths relative to build_lib
+        with suppress(ValueError):
+            path = Path(file).resolve().relative_to(self.build_lib)
+            return str(path).replace(os.sep, '/')
+        return None
+
+    def _create_file(self, relative_output: str, src_file: str, link=None):
+        dest = self.auxiliary_dir / relative_output
+        if not dest.parent.is_dir():
+            dest.parent.mkdir(parents=True)
+        self._file(src_file, dest, link=link)
+
+    def _create_links(self, outputs, output_mapping: Mapping[str, str]):
+        self.auxiliary_dir.mkdir(parents=True, exist_ok=True)
+        link_type = "sym" if _can_symlink_files(self.auxiliary_dir) else "hard"
+        normalised = ((self._normalize_output(k), v) for k, v in output_mapping.items())
+        # remove files that are not relative to build_lib
+        mappings = {k: v for k, v in normalised if k is not None}
+
+        for output in outputs:
+            relative = self._normalize_output(output)
+            if relative and relative not in mappings:
+                self._create_file(relative, output)
+
+        for relative, src in mappings.items():
+            self._create_file(relative, src, link=link_type)
+
+    def __enter__(self) -> Self:
+        msg = "Strict editable install will be performed using a link tree.\n"
+        _logger.warning(msg + _STRICT_WARNING)
+        return self
+
+    def __exit__(
+        self,
+        _exc_type: object,
+        _exc_value: object,
+        _traceback: object,
+    ) -> None:
+        msg = f"""\n
+        Strict editable installation performed using the auxiliary directory:
+            {self.auxiliary_dir}
+
+        Please be careful not to remove this directory; otherwise you might not be able
+        to import/use your package.
+        """
+        InformationOnly.emit("Editable installation.", msg)
+
+
+class _TopLevelFinder:
+    def __init__(self, dist: Distribution, name: str) -> None:
+        self.dist = dist
+        self.name = name
+
+    def template_vars(self) -> tuple[str, str, dict[str, str], dict[str, list[str]]]:
+        src_root = self.dist.src_root or os.curdir
+        top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist))
+        package_dir = self.dist.package_dir or {}
+        roots = _find_package_roots(top_level, package_dir, src_root)
+
+        namespaces_ = dict(
+            chain(
+                _find_namespaces(self.dist.packages or [], roots),
+                ((ns, []) for ns in _find_virtual_namespaces(roots)),
+            )
+        )
+
+        legacy_namespaces = {
+            pkg: find_package_path(pkg, roots, self.dist.src_root or "")
+            for pkg in self.dist.namespace_packages or []
+        }
+
+        mapping = {**roots, **legacy_namespaces}
+        # ^-- We need to explicitly add the legacy_namespaces to the mapping to be
+        #     able to import their modules even if another package sharing the same
+        #     namespace is installed in a conventional (non-editable) way.
+
+        name = f"__editable__.{self.name}.finder"
+        finder = _normalization.safe_identifier(name)
+        return finder, name, mapping, namespaces_
+
+    def get_implementation(self) -> Iterator[tuple[str, bytes]]:
+        finder, name, mapping, namespaces_ = self.template_vars()
+
+        content = bytes(_finder_template(name, mapping, namespaces_), "utf-8")
+        yield (f"{finder}.py", content)
+
+        content = _encode_pth(f"import {finder}; {finder}.install()")
+        yield (f"__editable__.{self.name}.pth", content)
+
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
+        for file, content in self.get_implementation():
+            wheel.writestr(file, content)
+
+    def __enter__(self) -> Self:
+        msg = "Editable install will be performed using a meta path finder.\n"
+        _logger.warning(msg + _LENIENT_WARNING)
+        return self
+
+    def __exit__(
+        self,
+        _exc_type: object,
+        _exc_value: object,
+        _traceback: object,
+    ) -> None:
+        msg = """\n
+        Please be careful with folders in your working directory that share a name
+        with your package, as they may take precedence during imports.
+        """
+        InformationOnly.emit("Editable installation.", msg)
+
+
+def _encode_pth(content: str) -> bytes:
+    """
+    Prior to Python 3.13 (see https://github.com/python/cpython/issues/77102),
+    .pth files are always read with 'locale' encoding, the recommendation
+    from the cpython core developers is to write them as ``open(path, "w")``
+    and ignore warnings (see python/cpython#77102, pypa/setuptools#3937).
+    This function tries to simulate this behavior without having to create an
+    actual file, in a way that supports a range of active Python versions.
+    (There seems to be some variety in the way different versions of Python handle
+    ``encoding=None``, not all of them use ``locale.getpreferredencoding(False)``
+    or ``locale.getencoding()``).
+    """
+    with io.BytesIO() as buffer:
+        wrapper = io.TextIOWrapper(buffer, encoding=py312.PTH_ENCODING)
+        # TODO: Python 3.13 replace the whole function with `bytes(content, "utf-8")`
+        wrapper.write(content)
+        wrapper.flush()
+        buffer.seek(0)
+        return buffer.read()
+
+
+def _can_symlink_files(base_dir: Path) -> bool:
+    with TemporaryDirectory(dir=str(base_dir.resolve())) as tmp:
+        path1, path2 = Path(tmp, "file1.txt"), Path(tmp, "file2.txt")
+        path1.write_text("file1", encoding="utf-8")
+        with suppress(AttributeError, NotImplementedError, OSError):
+            os.symlink(path1, path2)
+            if path2.is_symlink() and path2.read_text(encoding="utf-8") == "file1":
+                return True
+
+        try:
+            os.link(path1, path2)  # Ensure hard links can be created
+        except Exception as ex:
+            msg = (
+                "File system does not seem to support either symlinks or hard links. "
+                "Strict editable installs require one of them to be supported."
+            )
+            raise LinksNotSupported(msg) from ex
+        return False
+
+
+def _simple_layout(
+    packages: Iterable[str], package_dir: dict[str, str], project_dir: StrPath
+) -> bool:
+    """Return ``True`` if:
+    - all packages are contained by the same parent directory, **and**
+    - all packages become importable if the parent directory is added to ``sys.path``.
+
+    >>> _simple_layout(['a'], {"": "src"}, "/tmp/myproj")
+    True
+    >>> _simple_layout(['a', 'a.b'], {"": "src"}, "/tmp/myproj")
+    True
+    >>> _simple_layout(['a', 'a.b'], {}, "/tmp/myproj")
+    True
+    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"": "src"}, "/tmp/myproj")
+    True
+    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "a", "b": "b"}, ".")
+    True
+    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a", "b": "_b"}, ".")
+    False
+    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a"}, "/tmp/myproj")
+    False
+    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a.a1.a2": "_a2"}, ".")
+    False
+    >>> _simple_layout(['a', 'a.b'], {"": "src", "a.b": "_ab"}, "/tmp/myproj")
+    False
+    >>> # Special cases, no packages yet:
+    >>> _simple_layout([], {"": "src"}, "/tmp/myproj")
+    True
+    >>> _simple_layout([], {"a": "_a", "": "src"}, "/tmp/myproj")
+    False
+    """
+    layout = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages}
+    if not layout:
+        return set(package_dir) in ({}, {""})
+    parent = os.path.commonpath(starmap(_parent_path, layout.items()))
+    return all(
+        _path.same_path(Path(parent, *key.split('.')), value)
+        for key, value in layout.items()
+    )
+
+
+def _parent_path(pkg, pkg_path):
+    """Infer the parent path containing a package which, if added to ``sys.path``,
+    would allow importing that package.
+    When ``pkg`` is directly mapped into a directory with a different name, return its
+    own path.
+    >>> _parent_path("a", "src/a")
+    'src'
+    >>> _parent_path("b", "src/c")
+    'src/c'
+    """
+    parent = pkg_path[: -len(pkg)] if pkg_path.endswith(pkg) else pkg_path
+    return parent.rstrip("/" + os.sep)
+
+
+def _find_packages(dist: Distribution) -> Iterator[str]:
+    yield from iter(dist.packages or [])
+
+    py_modules = dist.py_modules or []
+    nested_modules = [mod for mod in py_modules if "." in mod]
+    if dist.ext_package:
+        yield dist.ext_package
+    else:
+        ext_modules = dist.ext_modules or []
+        nested_modules += [x.name for x in ext_modules if "." in x.name]
+
+    for module in nested_modules:
+        package, _, _ = module.rpartition(".")
+        yield package
+
+
+def _find_top_level_modules(dist: Distribution) -> Iterator[str]:
+    py_modules = dist.py_modules or []
+    yield from (mod for mod in py_modules if "." not in mod)
+
+    if not dist.ext_package:
+        ext_modules = dist.ext_modules or []
+        yield from (x.name for x in ext_modules if "." not in x.name)
+
+
+def _find_package_roots(
+    packages: Iterable[str],
+    package_dir: Mapping[str, str],
+    src_root: StrPath,
+) -> dict[str, str]:
+    pkg_roots: dict[str, str] = {
+        pkg: _absolute_root(find_package_path(pkg, package_dir, src_root))
+        for pkg in sorted(packages)
+    }
+
+    return _remove_nested(pkg_roots)
+
+
+def _absolute_root(path: StrPath) -> str:
+    """Works for packages and top-level modules"""
+    path_ = Path(path)
+    parent = path_.parent
+
+    if path_.exists():
+        return str(path_.resolve())
+    else:
+        return str(parent.resolve() / path_.name)
+
+
+def _find_virtual_namespaces(pkg_roots: dict[str, str]) -> Iterator[str]:
+    """By carefully designing ``package_dir``, it is possible to implement the logical
+    structure of PEP 420 in a package without the corresponding directories.
+
+    Moreover, a parent package can be purposefully/accidentally skipped in the
+    discovery phase (e.g. ``find_packages(include=["mypkg.*"])``, when ``mypkg.foo``
+    is included but ``mypkg`` itself is not).
+    We consider this case to also be a virtual namespace (ignoring the original
+    directory) to emulate a non-editable installation.
+
+    This function will try to find these kinds of namespaces.
+    """
+    for pkg in pkg_roots:
+        if "." not in pkg:
+            continue
+        parts = pkg.split(".")
+        for i in range(len(parts) - 1, 0, -1):
+            partial_name = ".".join(parts[:i])
+            path = Path(find_package_path(partial_name, pkg_roots, ""))
+            if not path.exists() or partial_name not in pkg_roots:
+                # partial_name not in pkg_roots ==> purposefully/accidentally skipped
+                yield partial_name
+
+
+def _find_namespaces(
+    packages: list[str], pkg_roots: dict[str, str]
+) -> Iterator[tuple[str, list[str]]]:
+    for pkg in packages:
+        path = find_package_path(pkg, pkg_roots, "")
+        if Path(path).exists() and not Path(path, "__init__.py").exists():
+            yield (pkg, [path])
+
+
+def _remove_nested(pkg_roots: dict[str, str]) -> dict[str, str]:
+    output = dict(pkg_roots.copy())
+
+    for pkg, path in reversed(list(pkg_roots.items())):
+        if any(
+            pkg != other and _is_nested(pkg, path, other, other_path)
+            for other, other_path in pkg_roots.items()
+        ):
+            output.pop(pkg)
+
+    return output
+
+
+def _is_nested(pkg: str, pkg_path: str, parent: str, parent_path: str) -> bool:
+    """
+    Return ``True`` if ``pkg`` is nested inside ``parent`` both logically and in the
+    file system.
+    >>> _is_nested("a.b", "path/a/b", "a", "path/a")
+    True
+    >>> _is_nested("a.b", "path/a/b", "a", "otherpath/a")
+    False
+    >>> _is_nested("a.b", "path/a/b", "c", "path/c")
+    False
+    >>> _is_nested("a.a", "path/a/a", "a", "path/a")
+    True
+    >>> _is_nested("b.a", "path/b/a", "a", "path/a")
+    False
+    """
+    norm_pkg_path = _path.normpath(pkg_path)
+    rest = pkg.replace(parent, "", 1).strip(".").split(".")
+    return pkg.startswith(parent) and norm_pkg_path == _path.normpath(
+        Path(parent_path, *rest)
+    )
+
+
+def _empty_dir(dir_: _P) -> _P:
+    """Create a directory ensured to be empty. Existing files may be removed."""
+    _shutil.rmtree(dir_, ignore_errors=True)
+    os.makedirs(dir_)
+    return dir_
+
+
+class _NamespaceInstaller(namespaces.Installer):
+    def __init__(self, distribution, installation_dir, editable_name, src_root) -> None:
+        self.distribution = distribution
+        self.src_root = src_root
+        self.installation_dir = installation_dir
+        self.editable_name = editable_name
+        self.outputs: list[str] = []
+        self.dry_run = False
+
+    def _get_nspkg_file(self):
+        """Installation target."""
+        return os.path.join(self.installation_dir, self.editable_name + self.nspkg_ext)
+
+    def _get_root(self):
+        """Where the modules/packages should be loaded from."""
+        return repr(str(self.src_root))
+
+
+_FINDER_TEMPLATE = """\
+from __future__ import annotations
+import sys
+from importlib.machinery import ModuleSpec, PathFinder
+from importlib.machinery import all_suffixes as module_suffixes
+from importlib.util import spec_from_file_location
+from itertools import chain
+from pathlib import Path
+
+MAPPING: dict[str, str] = {mapping!r}
+NAMESPACES: dict[str, list[str]] = {namespaces!r}
+PATH_PLACEHOLDER = {name!r} + ".__path_hook__"
+
+
+class _EditableFinder:  # MetaPathFinder
+    @classmethod
+    def find_spec(cls, fullname: str, path=None, target=None) -> ModuleSpec | None:  # type: ignore
+        # Top-level packages and modules (we know these exist in the FS)
+        if fullname in MAPPING:
+            pkg_path = MAPPING[fullname]
+            return cls._find_spec(fullname, Path(pkg_path))
+
+        # Handle immediate children modules (required for namespaces to work)
+        # To avoid problems with case sensitivity in the file system we delegate
+        # to the importlib.machinery implementation.
+        parent, _, child = fullname.rpartition(".")
+        if parent and parent in MAPPING:
+            return PathFinder.find_spec(fullname, path=[MAPPING[parent]])
+
+        # Other levels of nesting should be handled automatically by importlib
+        # using the parent path.
+        return None
+
+    @classmethod
+    def _find_spec(cls, fullname: str, candidate_path: Path) -> ModuleSpec | None:
+        init = candidate_path / "__init__.py"
+        candidates = (candidate_path.with_suffix(x) for x in module_suffixes())
+        for candidate in chain([init], candidates):
+            if candidate.exists():
+                return spec_from_file_location(fullname, candidate)
+        return None
+
+
+class _EditableNamespaceFinder:  # PathEntryFinder
+    @classmethod
+    def _path_hook(cls, path) -> type[_EditableNamespaceFinder]:
+        if path == PATH_PLACEHOLDER:
+            return cls
+        raise ImportError
+
+    @classmethod
+    def _paths(cls, fullname: str) -> list[str]:
+        paths = NAMESPACES[fullname]
+        if not paths and fullname in MAPPING:
+            paths = [MAPPING[fullname]]
+        # Always add placeholder, for 2 reasons:
+        # 1. __path__ cannot be empty for the spec to be considered namespace.
+        # 2. In the case of nested namespaces, we need to force
+        #    import machinery to query _EditableNamespaceFinder again.
+        return [*paths, PATH_PLACEHOLDER]
+
+    @classmethod
+    def find_spec(cls, fullname: str, target=None) -> ModuleSpec | None:  # type: ignore
+        if fullname in NAMESPACES:
+            spec = ModuleSpec(fullname, None, is_package=True)
+            spec.submodule_search_locations = cls._paths(fullname)
+            return spec
+        return None
+
+    @classmethod
+    def find_module(cls, _fullname) -> None:
+        return None
+
+
+def install():
+    if not any(finder == _EditableFinder for finder in sys.meta_path):
+        sys.meta_path.append(_EditableFinder)
+
+    if not NAMESPACES:
+        return
+
+    if not any(hook == _EditableNamespaceFinder._path_hook for hook in sys.path_hooks):
+        # PathEntryFinder is needed to create NamespaceSpec without private APIs
+        sys.path_hooks.append(_EditableNamespaceFinder._path_hook)
+    if PATH_PLACEHOLDER not in sys.path:
+        sys.path.append(PATH_PLACEHOLDER)  # Used just to trigger the path hook
+"""
+
+
+def _finder_template(
+    name: str, mapping: Mapping[str, str], namespaces: dict[str, list[str]]
+) -> str:
+    """Create a string containing the code for the ``MetaPathFinder`` and
+    ``PathEntryFinder``.
+    """
+    mapping = dict(sorted(mapping.items(), key=lambda p: p[0]))
+    return _FINDER_TEMPLATE.format(name=name, mapping=mapping, namespaces=namespaces)
+
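Illustrative use of ``_finder_template`` (the project name and paths are made up): the rendered source can be executed in-process, and its ``install()`` hook registers ``_EditableFinder`` on ``sys.meta_path`` (plus the namespace path hook when namespaces are present).

    source = _finder_template(
        name="__editable___demo_finder",
        mapping={"demo": "/home/user/demo/src/demo"},                    # hypothetical
        namespaces={"demo.plugins": ["/home/user/demo/src/demo/plugins"]},
    )
    scope: dict = {}
    exec(compile(source, "<editable-finder>", "exec"), scope)
    scope["install"]()  # registers _EditableFinder and the namespace path hook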
+
+class LinksNotSupported(errors.FileError):
+    """File system does not seem to support either symlinks or hard links."""
+
+
+class _DebuggingTips(SetuptoolsWarning):
+    _SUMMARY = "Problem in editable installation."
+    _DETAILS = """
+    An error happened while installing `{project}` in editable mode.
+
+    The following steps are recommended to help debug this problem:
+
+    - Try to install the project normally, without using the editable mode.
+      Does the error still persist?
+      (If it does, try fixing the problem before attempting the editable mode).
+    - If you are using binary extensions, make sure you have all OS-level
+      dependencies installed (e.g. compilers, toolchains, binary libraries, ...).
+    - Try the latest version of setuptools (maybe the error was already fixed).
+    - If you (or your project dependencies) are using any setuptools extension
+      or customization, make sure they support the editable mode.
+
+    After following the steps above, if the problem still persists and
+    you think this is related to how setuptools handles editable installations,
+    please submit a reproducible example
+    (see https://stackoverflow.com/help/minimal-reproducible-example) to:
+
+        https://github.com/pypa/setuptools/issues
+    """
+    _SEE_DOCS = "userguide/development_mode.html"
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/egg_info.py b/.venv/lib/python3.12/site-packages/setuptools/command/egg_info.py
new file mode 100644
index 00000000..f7763116
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/egg_info.py
@@ -0,0 +1,720 @@
+"""setuptools.command.egg_info
+
+Create a distribution's .egg-info directory and contents"""
+
+import functools
+import os
+import re
+import sys
+import time
+from collections.abc import Callable
+
+import packaging
+import packaging.requirements
+import packaging.version
+
+import setuptools.unicode_utils as unicode_utils
+from setuptools import Command
+from setuptools.command import bdist_egg
+from setuptools.command.sdist import sdist, walk_revctrl
+from setuptools.command.setopt import edit_config
+from setuptools.glob import glob
+
+from .. import _entry_points, _normalization
+from .._importlib import metadata
+from ..warnings import SetuptoolsDeprecationWarning
+from . import _requirestxt
+
+import distutils.errors
+import distutils.filelist
+from distutils import log
+from distutils.errors import DistutilsInternalError
+from distutils.filelist import FileList as _FileList
+from distutils.util import convert_path
+
+PY_MAJOR = f'{sys.version_info.major}.{sys.version_info.minor}'
+
+
+def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
+    """
+    Translate a file path glob like '*.txt' into a regular expression.
+    This differs from fnmatch.translate which allows wildcards to match
+    directory separators. It also knows about '**/' which matches any number of
+    directories.
+    """
+    pat = ''
+
+    # This will split on '/' within [character classes]. This is deliberate.
+    chunks = glob.split(os.path.sep)
+
+    sep = re.escape(os.sep)
+    valid_char = f'[^{sep}]'
+
+    for c, chunk in enumerate(chunks):
+        last_chunk = c == len(chunks) - 1
+
+        # Chunks that are a literal ** are globstars. They match anything.
+        if chunk == '**':
+            if last_chunk:
+                # Match anything if this is the last component
+                pat += '.*'
+            else:
+                # Match '(name/)*'
+                pat += f'(?:{valid_char}+{sep})*'
+            continue  # The whole path component has been handled; move on to the next
+
+        # Find any special characters in the remainder
+        i = 0
+        chunk_len = len(chunk)
+        while i < chunk_len:
+            char = chunk[i]
+            if char == '*':
+                # Match any number of name characters
+                pat += valid_char + '*'
+            elif char == '?':
+                # Match a name character
+                pat += valid_char
+            elif char == '[':
+                # Character class
+                inner_i = i + 1
+                # Skip initial !/] chars
+                if inner_i < chunk_len and chunk[inner_i] == '!':
+                    inner_i = inner_i + 1
+                if inner_i < chunk_len and chunk[inner_i] == ']':
+                    inner_i = inner_i + 1
+
+                # Loop till the closing ] is found
+                while inner_i < chunk_len and chunk[inner_i] != ']':
+                    inner_i = inner_i + 1
+
+                if inner_i >= chunk_len:
+                    # Got to the end of the string without finding a closing ]
+                    # Do not treat this as a matching group, but as a literal [
+                    pat += re.escape(char)
+                else:
+                    # Grab the insides of the [brackets]
+                    inner = chunk[i + 1 : inner_i]
+                    char_class = ''
+
+                    # Class negation
+                    if inner[0] == '!':
+                        char_class = '^'
+                        inner = inner[1:]
+
+                    char_class += re.escape(inner)
+                    pat += f'[{char_class}]'
+
+                    # Skip to the end ]
+                    i = inner_i
+            else:
+                pat += re.escape(char)
+            i += 1
+
+        # Join each chunk with the dir separator
+        if not last_chunk:
+            pat += sep
+
+    pat += r'\Z'
+    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
+
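A quick illustration of the behaviour described in the docstring (assuming a POSIX ``os.sep``, since the pattern is built around it): ``*`` never crosses a directory separator, while ``**/`` matches any number of directories.

    pat = translate_pattern("docs/*.txt")
    assert pat.match("docs/intro.txt")
    assert not pat.match("docs/sub/intro.txt")   # '*' stops at the separator

    deep = translate_pattern("docs/**/*.txt")
    assert deep.match("docs/a/b/intro.txt")      # '**/' spans directories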
+
+class InfoCommon:
+    tag_build = None
+    tag_date = None
+
+    @property
+    def name(self):
+        return _normalization.safe_name(self.distribution.get_name())
+
+    def tagged_version(self):
+        tagged = self._maybe_tag(self.distribution.get_version())
+        return _normalization.safe_version(tagged)
+
+    def _maybe_tag(self, version):
+        """
+        egg_info may be called more than once for a distribution,
+        in which case the version string already contains all tags.
+        """
+        return (
+            version
+            if self.vtags and self._already_tagged(version)
+            else version + self.vtags
+        )
+
+    def _already_tagged(self, version: str) -> bool:
+        # Depending on their format, tags may change with version normalization.
+        # So in addition to the regular tags, we have to search for the normalized ones.
+        return version.endswith(self.vtags) or version.endswith(self._safe_tags())
+
+    def _safe_tags(self) -> str:
+        # To implement this we can rely on `safe_version`, pretending the tags follow
+        # a fake version "0". Then we simply discard the leading "0" afterwards.
+        try:
+            return _normalization.safe_version(f"0{self.vtags}")[1:]
+        except packaging.version.InvalidVersion:
+            return _normalization.safe_name(self.vtags.replace(' ', '.'))
+
+    def tags(self) -> str:
+        version = ''
+        if self.tag_build:
+            version += self.tag_build
+        if self.tag_date:
+            version += time.strftime("%Y%m%d")
+        return version
+
+    vtags = property(tags)
+
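For illustration (class and values hypothetical), ``tags()`` above simply concatenates ``tag_build`` with an optional date stamp; ``_maybe_tag`` then appends the result to the base version, e.g. turning ``1.2`` into something like ``1.2.dev20250131`` when ``egg_info -b .dev -d`` is used.

    import time

    class _TagDemo:
        # mirrors the tags() logic above, as if configured with `egg_info -b .dev -d`
        tag_build = ".dev"
        tag_date = True

        def tags(self) -> str:
            version = self.tag_build or ""
            if self.tag_date:
                version += time.strftime("%Y%m%d")
            return version

    print("1.2" + _TagDemo().tags())  # e.g. '1.2.dev20250131'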
+
+class egg_info(InfoCommon, Command):
+    description = "create a distribution's .egg-info directory"
+
+    user_options = [
+        (
+            'egg-base=',
+            'e',
+            "directory containing .egg-info directories"
+            " [default: top of the source tree]",
+        ),
+        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+        ('no-date', 'D', "Don't include date stamp [default]"),
+    ]
+
+    boolean_options = ['tag-date']
+    negative_opt = {
+        'no-date': 'tag-date',
+    }
+
+    def initialize_options(self):
+        self.egg_base = None
+        self.egg_name = None
+        self.egg_info = None
+        self.egg_version = None
+        self.ignore_egg_info_in_manifest = False
+
+    ####################################
+    # allow the 'tag_svn_revision' to be detected and
+    # set, supporting sdists built on older Setuptools.
+    @property
+    def tag_svn_revision(self) -> None:
+        pass
+
+    @tag_svn_revision.setter
+    def tag_svn_revision(self, value):
+        pass
+
+    ####################################
+
+    def save_version_info(self, filename) -> None:
+        """
+        Materialize the value of date into the
+        build tag. Install build keys in a deterministic order
+        to avoid arbitrary reordering on subsequent builds.
+        """
+        # follow the order these keys would have been added
+        # when PYTHONHASHSEED=0
+        egg_info = dict(tag_build=self.tags(), tag_date=0)
+        edit_config(filename, dict(egg_info=egg_info))
+
+    def finalize_options(self) -> None:
+        # Note: we need to capture the current value returned
+        # by `self.tagged_version()`, so we can later update
+        # `self.distribution.metadata.version` without
+        # repercussions.
+        self.egg_name = self.name
+        self.egg_version = self.tagged_version()
+        parsed_version = packaging.version.Version(self.egg_version)
+
+        try:
+            is_version = isinstance(parsed_version, packaging.version.Version)
+            spec = "%s==%s" if is_version else "%s===%s"
+            packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version))
+        except ValueError as e:
+            raise distutils.errors.DistutilsOptionError(
+                f"Invalid distribution name or version syntax: {self.egg_name}-{self.egg_version}"
+            ) from e
+
+        if self.egg_base is None:
+            dirs = self.distribution.package_dir
+            self.egg_base = (dirs or {}).get('', os.curdir)
+
+        self.ensure_dirname('egg_base')
+        self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info'
+        if self.egg_base != os.curdir:
+            self.egg_info = os.path.join(self.egg_base, self.egg_info)
+
+        # Set package version for the benefit of dumber commands
+        # (e.g. sdist, bdist_wininst, etc.)
+        #
+        self.distribution.metadata.version = self.egg_version
+
+    def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
+        """Compute filename of the output egg. Private API."""
+        return _egg_basename(self.egg_name, self.egg_version, py_version, platform)
+
+    def write_or_delete_file(self, what, filename, data, force: bool = False) -> None:
+        """Write `data` to `filename` or delete if empty
+
+        If `data` is non-empty, this routine is the same as ``write_file()``.
+        If `data` is empty but not ``None``, this is the same as calling
+        ``delete_file(filename)``.  If `data` is ``None``, then this is a no-op
+        unless `filename` exists, in which case a warning is issued about the
+        orphaned file (if `force` is false), or the file is deleted (if `force` is true).
+        """
+        if data:
+            self.write_file(what, filename, data)
+        elif os.path.exists(filename):
+            if data is None and not force:
+                log.warn("%s not set in setup(), but %s exists", what, filename)
+                return
+            else:
+                self.delete_file(filename)
+
+    def write_file(self, what, filename, data) -> None:
+        """Write `data` to `filename` (if not a dry run) after announcing it
+
+        `what` is used in a log message to identify what is being written
+        to the file.
+        """
+        log.info("writing %s to %s", what, filename)
+        data = data.encode("utf-8")
+        if not self.dry_run:
+            f = open(filename, 'wb')
+            f.write(data)
+            f.close()
+
+    def delete_file(self, filename) -> None:
+        """Delete `filename` (if not a dry run) after announcing it"""
+        log.info("deleting %s", filename)
+        if not self.dry_run:
+            os.unlink(filename)
+
+    def run(self) -> None:
+        # Pre-load to avoid iterating over entry-points while an empty .egg-info
+        # exists in sys.path. See pypa/pyproject-hooks#206
+        writers = list(metadata.entry_points(group='egg_info.writers'))
+
+        self.mkpath(self.egg_info)
+        try:
+            os.utime(self.egg_info, None)
+        except OSError as e:
+            msg = f"Cannot update time stamp of directory '{self.egg_info}'"
+            raise distutils.errors.DistutilsFileError(msg) from e
+        for ep in writers:
+            writer = ep.load()
+            writer(self, ep.name, os.path.join(self.egg_info, ep.name))
+
+        # Get rid of native_libs.txt if it was put there by older bdist_egg
+        nl = os.path.join(self.egg_info, "native_libs.txt")
+        if os.path.exists(nl):
+            self.delete_file(nl)
+
+        self.find_sources()
+
+    def find_sources(self) -> None:
+        """Generate SOURCES.txt manifest file"""
+        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
+        mm = manifest_maker(self.distribution)
+        mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest
+        mm.manifest = manifest_filename
+        mm.run()
+        self.filelist = mm.filelist
+
+
+class FileList(_FileList):
+    # Implementations of the various MANIFEST.in commands
+
+    def __init__(
+        self, warn=None, debug_print=None, ignore_egg_info_dir: bool = False
+    ) -> None:
+        super().__init__(warn, debug_print)
+        self.ignore_egg_info_dir = ignore_egg_info_dir
+
+    def process_template_line(self, line) -> None:
+        # Parse the line: split it up, make sure the right number of words
+        # is there, and return the relevant words.  'action' is always
+        # defined: it's the first word of the line.  Which of the other
+        # three are defined depends on the action; it'll be either
+        # patterns, (dir and patterns), or (dir_pattern).
+        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
+
+        action_map: dict[str, Callable] = {
+            'include': self.include,
+            'exclude': self.exclude,
+            'global-include': self.global_include,
+            'global-exclude': self.global_exclude,
+            'recursive-include': functools.partial(
+                self.recursive_include,
+                dir,
+            ),
+            'recursive-exclude': functools.partial(
+                self.recursive_exclude,
+                dir,
+            ),
+            'graft': self.graft,
+            'prune': self.prune,
+        }
+        log_map = {
+            'include': "warning: no files found matching '%s'",
+            'exclude': ("warning: no previously-included files found matching '%s'"),
+            'global-include': (
+                "warning: no files found matching '%s' anywhere in distribution"
+            ),
+            'global-exclude': (
+                "warning: no previously-included files matching "
+                "'%s' found anywhere in distribution"
+            ),
+            'recursive-include': (
+                "warning: no files found matching '%s' under directory '%s'"
+            ),
+            'recursive-exclude': (
+                "warning: no previously-included files matching "
+                "'%s' found under directory '%s'"
+            ),
+            'graft': "warning: no directories found matching '%s'",
+            'prune': "no previously-included directories found matching '%s'",
+        }
+
+        try:
+            process_action = action_map[action]
+        except KeyError:
+            msg = f"Invalid MANIFEST.in: unknown action {action!r} in {line!r}"
+            raise DistutilsInternalError(msg) from None
+
+        # OK, now we know that the action is valid and we have the
+        # right number of words on the line for that action -- so we
+        # can proceed with minimal error-checking.
+
+        action_is_recursive = action.startswith('recursive-')
+        if action in {'graft', 'prune'}:
+            patterns = [dir_pattern]
+        extra_log_args = (dir,) if action_is_recursive else ()
+        log_tmpl = log_map[action]
+
+        self.debug_print(
+            ' '.join(
+                [action] + ([dir] if action_is_recursive else []) + patterns,
+            )
+        )
+        for pattern in patterns:
+            if not process_action(pattern):
+                log.warn(log_tmpl, pattern, *extra_log_args)
+
+    def _remove_files(self, predicate):
+        """
+        Remove all files from the file list that match the predicate.
+        Return True if any matching files were removed
+        """
+        found = False
+        for i in range(len(self.files) - 1, -1, -1):
+            if predicate(self.files[i]):
+                self.debug_print(" removing " + self.files[i])
+                del self.files[i]
+                found = True
+        return found
+
+    def include(self, pattern):
+        """Include files that match 'pattern'."""
+        found = [f for f in glob(pattern) if not os.path.isdir(f)]
+        self.extend(found)
+        return bool(found)
+
+    def exclude(self, pattern):
+        """Exclude files that match 'pattern'."""
+        match = translate_pattern(pattern)
+        return self._remove_files(match.match)
+
+    def recursive_include(self, dir, pattern):
+        """
+        Include all files anywhere in 'dir/' that match the pattern.
+        """
+        full_pattern = os.path.join(dir, '**', pattern)
+        found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)]
+        self.extend(found)
+        return bool(found)
+
+    def recursive_exclude(self, dir, pattern):
+        """
+        Exclude any file anywhere in 'dir/' that matches the pattern.
+        """
+        match = translate_pattern(os.path.join(dir, '**', pattern))
+        return self._remove_files(match.match)
+
+    def graft(self, dir):
+        """Include all files from 'dir/'."""
+        found = [
+            item
+            for match_dir in glob(dir)
+            for item in distutils.filelist.findall(match_dir)
+        ]
+        self.extend(found)
+        return bool(found)
+
+    def prune(self, dir):
+        """Filter out files from 'dir/'."""
+        match = translate_pattern(os.path.join(dir, '**'))
+        return self._remove_files(match.match)
+
+    def global_include(self, pattern):
+        """
+        Include all files anywhere in the current directory that match the
+        pattern. This is very inefficient on large file trees.
+        """
+        if self.allfiles is None:
+            self.findall()
+        match = translate_pattern(os.path.join('**', pattern))
+        found = [f for f in self.allfiles if match.match(f)]
+        self.extend(found)
+        return bool(found)
+
+    def global_exclude(self, pattern):
+        """
+        Exclude all files anywhere that match the pattern.
+        """
+        match = translate_pattern(os.path.join('**', pattern))
+        return self._remove_files(match.match)
+
+    def append(self, item) -> None:
+        if item.endswith('\r'):  # Fix older sdists built on Windows
+            item = item[:-1]
+        path = convert_path(item)
+
+        if self._safe_path(path):
+            self.files.append(path)
+
+    def extend(self, paths) -> None:
+        self.files.extend(filter(self._safe_path, paths))
+
+    def _repair(self):
+        """
+        Replace self.files with only safe paths
+
+        Because some owners of FileList manipulate the underlying
+        ``files`` attribute directly, this method must be called to
+        repair those paths.
+        """
+        self.files = list(filter(self._safe_path, self.files))
+
+    def _safe_path(self, path):
+        enc_warn = "'%s' not %s encodable -- skipping"
+
+        # To avoid accidental transcoding errors, first decode the path to unicode
+        u_path = unicode_utils.filesys_decode(path)
+        if u_path is None:
+            log.warn(f"'{path}' in unexpected encoding -- skipping")
+            return False
+
+        # Must ensure utf-8 encodability
+        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
+        if utf8_path is None:
+            log.warn(enc_warn, path, 'utf-8')
+            return False
+
+        try:
+            # ignore egg-info paths
+            is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path
+            if self.ignore_egg_info_dir and is_egg_info:
+                return False
+            # accept if either way checks out
+            if os.path.exists(u_path) or os.path.exists(utf8_path):
+                return True
+        # this will catch any encode errors decoding u_path
+        except UnicodeEncodeError:
+            log.warn(enc_warn, path, sys.getfilesystemencoding())
+
+
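As a sketch of how MANIFEST.in lines are dispatched by ``process_template_line`` above (file names hypothetical; missing files only trigger warnings):

    fl = FileList()
    for line in [
        "include README.rst",            # -> fl.include("README.rst")
        "recursive-include docs *.rst",  # -> fl.recursive_include("docs", "*.rst")
        "global-exclude *.py[cod]",      # -> fl.global_exclude("*.py[cod]")
        "prune build",                   # -> fl.prune("build")
    ]:
        fl.process_template_line(line)
    print(fl.files)
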
+class manifest_maker(sdist):
+    template = "MANIFEST.in"
+
+    def initialize_options(self) -> None:
+        self.use_defaults = True
+        self.prune = True
+        self.manifest_only = True
+        self.force_manifest = True
+        self.ignore_egg_info_dir = False
+
+    def finalize_options(self) -> None:
+        pass
+
+    def run(self) -> None:
+        self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir)
+        if not os.path.exists(self.manifest):
+            self.write_manifest()  # it must exist so it'll get in the list
+        self.add_defaults()
+        if os.path.exists(self.template):
+            self.read_template()
+        self.add_license_files()
+        self._add_referenced_files()
+        self.prune_file_list()
+        self.filelist.sort()
+        self.filelist.remove_duplicates()
+        self.write_manifest()
+
+    def _manifest_normalize(self, path):
+        path = unicode_utils.filesys_decode(path)
+        return path.replace(os.sep, '/')
+
+    def write_manifest(self) -> None:
+        """
+        Write the file list in 'self.filelist' to the manifest file
+        named by 'self.manifest'.
+        """
+        self.filelist._repair()
+
+        # _repair() above ensures encodability, but not normalization of the paths
+        files = [self._manifest_normalize(f) for f in self.filelist.files]
+        msg = f"writing manifest file '{self.manifest}'"
+        self.execute(write_file, (self.manifest, files), msg)
+
+    def warn(self, msg) -> None:
+        if not self._should_suppress_warning(msg):
+            sdist.warn(self, msg)
+
+    @staticmethod
+    def _should_suppress_warning(msg):
+        """
+        suppress missing-file warnings from sdist
+        """
+        return re.match(r"standard file .*not found", msg)
+
+    def add_defaults(self) -> None:
+        sdist.add_defaults(self)
+        self.filelist.append(self.template)
+        self.filelist.append(self.manifest)
+        rcfiles = list(walk_revctrl())
+        if rcfiles:
+            self.filelist.extend(rcfiles)
+        elif os.path.exists(self.manifest):
+            self.read_manifest()
+
+        if os.path.exists("setup.py"):
+            # setup.py should be included by default, even if it's not
+            # the script called to create the sdist
+            self.filelist.append("setup.py")
+
+        ei_cmd = self.get_finalized_command('egg_info')
+        self.filelist.graft(ei_cmd.egg_info)
+
+    def add_license_files(self) -> None:
+        license_files = self.distribution.metadata.license_files or []
+        for lf in license_files:
+            log.info("adding license file '%s'", lf)
+        self.filelist.extend(license_files)
+
+    def _add_referenced_files(self):
+        """Add files referenced by the config (e.g. `file:` directive) to filelist"""
+        referenced = getattr(self.distribution, '_referenced_files', [])
+        # ^-- fallback if dist comes from distutils or is a custom class
+        for rf in referenced:
+            log.debug("adding file referenced by config '%s'", rf)
+        self.filelist.extend(referenced)
+
+    def _safe_data_files(self, build_py):
+        """
+        The parent class implementation of this method
+        (``sdist``) will try to include data files, which
+        might cause recursion problems when
+        ``include_package_data=True``.
+
+        Therefore, avoid triggering any attempt of
+        analyzing/building the manifest again.
+        """
+        if hasattr(build_py, 'get_data_files_without_manifest'):
+            return build_py.get_data_files_without_manifest()
+
+        SetuptoolsDeprecationWarning.emit(
+            "`build_py` command does not inherit from setuptools' `build_py`.",
+            """
+            Custom 'build_py' does not implement 'get_data_files_without_manifest'.
+            Please extend command classes from setuptools instead of distutils.
+            """,
+            see_url="https://peps.python.org/pep-0632/",
+            # due_date not defined yet, old projects might still do it?
+        )
+        return build_py.get_data_files()
+
+
+def write_file(filename, contents) -> None:
+    """Create a file with the specified name and write 'contents' (a
+    sequence of strings without line terminators) to it.
+    """
+    contents = "\n".join(contents)
+
+    # assuming the contents has been vetted for utf-8 encoding
+    contents = contents.encode("utf-8")
+
+    with open(filename, "wb") as f:  # always write POSIX-style manifest
+        f.write(contents)
+
+
+def write_pkg_info(cmd, basename, filename) -> None:
+    log.info("writing %s", filename)
+    if not cmd.dry_run:
+        metadata = cmd.distribution.metadata
+        metadata.version, oldver = cmd.egg_version, metadata.version
+        metadata.name, oldname = cmd.egg_name, metadata.name
+
+        try:
+            # write unescaped data to PKG-INFO, so older pkg_resources
+            # can still parse it
+            metadata.write_pkg_info(cmd.egg_info)
+        finally:
+            metadata.name, metadata.version = oldname, oldver
+
+        safe = getattr(cmd.distribution, 'zip_safe', None)
+
+        bdist_egg.write_safety_flag(cmd.egg_info, safe)
+
+
+def warn_depends_obsolete(cmd, basename, filename) -> None:
+    """
+    Unused: left to avoid errors when updating (from source) from <= 67.8.
+    Old installations have a .dist-info directory with the entry-point
+    ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``.
+    This may trigger errors when running the first egg_info in build_meta.
+    TODO: Remove this function in a version sufficiently > 68.
+    """
+
+
+# Export API used in entry_points
+write_requirements = _requirestxt.write_requirements
+write_setup_requirements = _requirestxt.write_setup_requirements
+
+
+def write_toplevel_names(cmd, basename, filename) -> None:
+    pkgs = dict.fromkeys([
+        k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names()
+    ])
+    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
+
+
+def overwrite_arg(cmd, basename, filename) -> None:
+    write_arg(cmd, basename, filename, True)
+
+
+def write_arg(cmd, basename, filename, force: bool = False) -> None:
+    argname = os.path.splitext(basename)[0]
+    value = getattr(cmd.distribution, argname, None)
+    if value is not None:
+        value = '\n'.join(value) + '\n'
+    cmd.write_or_delete_file(argname, filename, value, force)
+
+
+def write_entries(cmd, basename, filename) -> None:
+    eps = _entry_points.load(cmd.distribution.entry_points)
+    defn = _entry_points.render(eps)
+    cmd.write_or_delete_file('entry points', filename, defn, True)
+
+
+def _egg_basename(egg_name, egg_version, py_version=None, platform=None):
+    """Compute filename of the output egg. Private API."""
+    name = _normalization.filename_component(egg_name)
+    version = _normalization.filename_component(egg_version)
+    egg = f"{name}-{version}-py{py_version or PY_MAJOR}"
+    if platform:
+        egg += f"-{platform}"
+    return egg
+
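For example (assuming ``filename_component`` leaves simple names and versions unchanged):

    print(_egg_basename("demo", "1.0"))                           # e.g. 'demo-1.0-py3.12'
    print(_egg_basename("demo", "1.0", "3.12", "linux-x86_64"))   # 'demo-1.0-py3.12-linux-x86_64'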
+
+class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
+    """Deprecated behavior warning for EggInfo, bypassing suppression."""
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/install.py b/.venv/lib/python3.12/site-packages/setuptools/command/install.py
new file mode 100644
index 00000000..15ef3646
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/install.py
@@ -0,0 +1,183 @@
+from __future__ import annotations
+
+import glob
+import inspect
+import platform
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Any, ClassVar, cast
+
+import setuptools
+
+from ..dist import Distribution
+from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
+from .bdist_egg import bdist_egg as bdist_egg_cls
+
+import distutils.command.install as orig
+from distutils.errors import DistutilsArgError
+
+if TYPE_CHECKING:
+    # This is only used for a type-cast, don't import at runtime or it'll cause deprecation warnings
+    from .easy_install import easy_install as easy_install_cls
+else:
+    easy_install_cls = None
+
+
+def __getattr__(name: str):  # pragma: no cover
+    if name == "_install":
+        SetuptoolsDeprecationWarning.emit(
+            "`setuptools.command._install` was an internal implementation detail "
+            + "that was left in for numpy<1.9 support.",
+            due_date=(2025, 5, 2),  # Originally added on 2024-11-01
+        )
+        return orig.install
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
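The module-level ``__getattr__`` above relies on the PEP 562 hook, which is only consulted when normal attribute lookup on the module fails. A generic, self-contained sketch of the same pattern (all names hypothetical):

    import warnings

    new_thing = object()
    _RENAMED = {"old_thing": "new_thing"}

    def __getattr__(name: str):
        # only reached when the module has no attribute `name`
        if name in _RENAMED:
            warnings.warn(
                f"{name} is deprecated; use {_RENAMED[name]} instead",
                DeprecationWarning,
                stacklevel=2,
            )
            return globals()[_RENAMED[name]]
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")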
+
+class install(orig.install):
+    """Use easy_install to install the package, w/dependencies"""
+
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
+    user_options = orig.install.user_options + [
+        ('old-and-unmanageable', None, "Try not to use this!"),
+        (
+            'single-version-externally-managed',
+            None,
+            "used by system package builders to create 'flat' eggs",
+        ),
+    ]
+    boolean_options = orig.install.boolean_options + [
+        'old-and-unmanageable',
+        'single-version-externally-managed',
+    ]
+    # Type the same as distutils.command.install.install.sub_commands
+    # Must keep the second tuple item potentially None due to invariance
+    new_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] = [
+        ('install_egg_info', lambda self: True),
+        ('install_scripts', lambda self: True),
+    ]
+    _nc = dict(new_commands)
+
+    def initialize_options(self):
+        SetuptoolsDeprecationWarning.emit(
+            "setup.py install is deprecated.",
+            """
+            Please avoid running ``setup.py`` directly.
+            Instead, use pypa/build, pypa/installer or other
+            standards-based tools.
+            """,
+            see_url="https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html",
+            # TODO: Document how to bootstrap setuptools without install
+            #       (e.g. by unzipping the wheel file)
+            #       and then add a due_date to this warning.
+        )
+
+        super().initialize_options()
+        self.old_and_unmanageable = None
+        self.single_version_externally_managed = None
+
+    def finalize_options(self) -> None:
+        super().finalize_options()
+        if self.root:
+            self.single_version_externally_managed = True
+        elif self.single_version_externally_managed:
+            if not self.root and not self.record:
+                raise DistutilsArgError(
+                    "You must specify --record or --root when building system packages"
+                )
+
+    def handle_extra_path(self):
+        if self.root or self.single_version_externally_managed:
+            # explicit backward-compatibility mode, allow extra_path to work
+            return orig.install.handle_extra_path(self)
+
+        # Ignore extra_path when installing an egg (or being run by another
+        # command without --root or --single-version-externally-managed)
+        self.path_file = None
+        self.extra_dirs = ''
+        return None
+
+    def run(self):
+        # Explicit request for old-style install?  Just do it
+        if self.old_and_unmanageable or self.single_version_externally_managed:
+            return super().run()
+
+        if not self._called_from_setup(inspect.currentframe()):
+            # Run in backward-compatibility mode to support bdist_* commands.
+            super().run()
+        else:
+            self.do_egg_install()
+
+        return None
+
+    @staticmethod
+    def _called_from_setup(run_frame):
+        """
+        Attempt to detect whether run() was called from setup() or by another
+        command.  If called by setup(), the parent caller will be the
+        'run_command' method in 'distutils.dist', and *its* caller will be
+        the 'run_commands' method.  If called any other way, the
+        immediate caller *might* be 'run_command', but it won't have been
+        called by 'run_commands'. Return True in that case or if a call stack
+        is unavailable. Return False otherwise.
+        """
+        if run_frame is None:
+            msg = "Call stack not available. bdist_* commands may fail."
+            SetuptoolsWarning.emit(msg)
+            if platform.python_implementation() == 'IronPython':
+                msg = "For best results, pass -X:Frames to enable call stack."
+                SetuptoolsWarning.emit(msg)
+            return True
+
+        frames = inspect.getouterframes(run_frame)
+        for frame in frames[2:4]:
+            (caller,) = frame[:1]
+            info = inspect.getframeinfo(caller)
+            caller_module = caller.f_globals.get('__name__', '')
+
+            if caller_module == "setuptools.dist" and info.function == "run_command":
+                # Starting from v61.0.0 setuptools overwrites dist.run_command
+                continue
+
+            return caller_module == 'distutils.dist' and info.function == 'run_commands'
+
+        return False
+
+    def do_egg_install(self) -> None:
+        easy_install = self.distribution.get_command_class('easy_install')
+
+        cmd = cast(
+            # We'd want to cast easy_install as type[easy_install_cls] but a bug in
+            # mypy makes it think easy_install() returns a Command on Python 3.12+
+            # https://github.com/python/mypy/issues/18088
+            easy_install_cls,
+            easy_install(  # type: ignore[call-arg]
+                self.distribution,
+                args="x",
+                root=self.root,
+                record=self.record,
+            ),
+        )
+        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
+        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed
+
+        # pick up setup-dir .egg files only: no .egg-info
+        cmd.package_index.scan(glob.glob('*.egg'))
+
+        self.run_command('bdist_egg')
+        bdist_egg = cast(bdist_egg_cls, self.distribution.get_command_obj('bdist_egg'))
+        args = [bdist_egg.egg_output]
+
+        if setuptools.bootstrap_install_from:
+            # Bootstrap self-installation of setuptools
+            args.insert(0, setuptools.bootstrap_install_from)
+
+        cmd.args = args
+        cmd.run(show_deprecation=False)
+        setuptools.bootstrap_install_from = None
+
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = [
+    cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc
+] + install.new_commands
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/install_egg_info.py b/.venv/lib/python3.12/site-packages/setuptools/command/install_egg_info.py
new file mode 100644
index 00000000..44f22ccf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/install_egg_info.py
@@ -0,0 +1,58 @@
+import os
+
+from setuptools import Command, namespaces
+from setuptools.archive_util import unpack_archive
+
+from .._path import ensure_directory
+
+from distutils import dir_util, log
+
+
+class install_egg_info(namespaces.Installer, Command):
+    """Install an .egg-info directory for the package"""
+
+    description = "Install an .egg-info directory for the package"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+
+    def finalize_options(self) -> None:
+        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
+        ei_cmd = self.get_finalized_command("egg_info")
+        basename = f"{ei_cmd._get_egg_basename()}.egg-info"
+        self.source = ei_cmd.egg_info
+        self.target = os.path.join(self.install_dir, basename)
+        self.outputs: list[str] = []
+
+    def run(self) -> None:
+        self.run_command('egg_info')
+        if os.path.isdir(self.target) and not os.path.islink(self.target):
+            dir_util.remove_tree(self.target, dry_run=self.dry_run)
+        elif os.path.exists(self.target):
+            self.execute(os.unlink, (self.target,), "Removing " + self.target)
+        if not self.dry_run:
+            ensure_directory(self.target)
+        self.execute(self.copytree, (), f"Copying {self.source} to {self.target}")
+        self.install_namespaces()
+
+    def get_outputs(self):
+        return self.outputs
+
+    def copytree(self) -> None:
+        # Copy the .egg-info tree to site-packages
+        def skimmer(src, dst):
+            # filter out source-control directories; note that 'src' is always
+            # a '/'-separated path, regardless of platform.  'dst' is a
+            # platform-specific path.
+            for skip in '.svn/', 'CVS/':
+                if src.startswith(skip) or '/' + skip in src:
+                    return None
+            self.outputs.append(dst)
+            log.debug("Copying %s to %s", src, dst)
+            return dst
+
+        unpack_archive(self.source, self.target, skimmer)
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/install_lib.py b/.venv/lib/python3.12/site-packages/setuptools/command/install_lib.py
new file mode 100644
index 00000000..8e1e0727
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/install_lib.py
@@ -0,0 +1,137 @@
+from __future__ import annotations
+
+import os
+import sys
+from itertools import product, starmap
+
+from .._path import StrPath
+from ..dist import Distribution
+
+import distutils.command.install_lib as orig
+
+
+class install_lib(orig.install_lib):
+    """Don't add compiled flags to filenames of non-Python files"""
+
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
+    def run(self) -> None:
+        self.build()
+        outfiles = self.install()
+        if outfiles is not None:
+            # always compile, in case we have any extension stubs to deal with
+            self.byte_compile(outfiles)
+
+    def get_exclusions(self):
+        """
+        Return a collections.Sized collections.Container of paths to be
+        excluded for single_version_externally_managed installations.
+        """
+        all_packages = (
+            pkg
+            for ns_pkg in self._get_SVEM_NSPs()
+            for pkg in self._all_packages(ns_pkg)
+        )
+
+        excl_specs = product(all_packages, self._gen_exclusion_paths())
+        return set(starmap(self._exclude_pkg_path, excl_specs))
+
+    def _exclude_pkg_path(self, pkg, exclusion_path):
+        """
+        Given a package name and exclusion path within that package,
+        compute the full exclusion path.
+        """
+        parts = pkg.split('.') + [exclusion_path]
+        return os.path.join(self.install_dir, *parts)
+
+    @staticmethod
+    def _all_packages(pkg_name):
+        """
+        >>> list(install_lib._all_packages('foo.bar.baz'))
+        ['foo.bar.baz', 'foo.bar', 'foo']
+        """
+        while pkg_name:
+            yield pkg_name
+            pkg_name, _sep, _child = pkg_name.rpartition('.')
+
+    def _get_SVEM_NSPs(self):
+        """
+        Get namespace packages (list) but only for
+        single_version_externally_managed installations and empty otherwise.
+        """
+        # TODO: is it necessary to short-circuit here? i.e. what's the cost
+        # if get_finalized_command is called even when namespace_packages is
+        # False?
+        if not self.distribution.namespace_packages:
+            return []
+
+        install_cmd = self.get_finalized_command('install')
+        svem = install_cmd.single_version_externally_managed
+
+        return self.distribution.namespace_packages if svem else []
+
+    @staticmethod
+    def _gen_exclusion_paths():
+        """
+        Generate file paths to be excluded for namespace packages (bytecode
+        cache files).
+        """
+        # always exclude the package module itself
+        yield '__init__.py'
+
+        yield '__init__.pyc'
+        yield '__init__.pyo'
+
+        if not hasattr(sys, 'implementation'):
+            return
+
+        base = os.path.join('__pycache__', '__init__.' + sys.implementation.cache_tag)
+        yield base + '.pyc'
+        yield base + '.pyo'
+        yield base + '.opt-1.pyc'
+        yield base + '.opt-2.pyc'
+
+    def copy_tree(
+        self,
+        infile: StrPath,
+        outfile: str,
+        # override: Using actual booleans
+        preserve_mode: bool = True,  # type: ignore[override]
+        preserve_times: bool = True,  # type: ignore[override]
+        preserve_symlinks: bool = False,  # type: ignore[override]
+        level: object = 1,
+    ) -> list[str]:
+        assert preserve_mode
+        assert preserve_times
+        assert not preserve_symlinks
+        exclude = self.get_exclusions()
+
+        if not exclude:
+            return orig.install_lib.copy_tree(self, infile, outfile)
+
+        # Exclude namespace package __init__.py* files from the output
+
+        from setuptools.archive_util import unpack_directory
+
+        from distutils import log
+
+        outfiles: list[str] = []
+
+        def pf(src: str, dst: str):
+            if dst in exclude:
+                log.warn("Skipping installation of %s (namespace package)", dst)
+                return False
+
+            log.info("copying %s -> %s", src, os.path.dirname(dst))
+            outfiles.append(dst)
+            return dst
+
+        unpack_directory(infile, outfile, pf)
+        return outfiles
+
+    def get_outputs(self):
+        outputs = orig.install_lib.get_outputs(self)
+        exclude = self.get_exclusions()
+        if exclude:
+            return [f for f in outputs if f not in exclude]
+        return outputs
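For reference (POSIX separators shown, CPython 3.12 assumed), ``_gen_exclusion_paths`` above yields the per-package files that ``get_exclusions`` prunes for namespace packages:

    print(list(install_lib._gen_exclusion_paths()))
    # ['__init__.py', '__init__.pyc', '__init__.pyo',
    #  '__pycache__/__init__.cpython-312.pyc',
    #  '__pycache__/__init__.cpython-312.pyo',
    #  '__pycache__/__init__.cpython-312.opt-1.pyc',
    #  '__pycache__/__init__.cpython-312.opt-2.pyc']
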
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/install_scripts.py b/.venv/lib/python3.12/site-packages/setuptools/command/install_scripts.py
new file mode 100644
index 00000000..4401cf69
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/install_scripts.py
@@ -0,0 +1,73 @@
+from __future__ import annotations
+
+import os
+import sys
+
+from .._path import ensure_directory
+from ..dist import Distribution
+
+import distutils.command.install_scripts as orig
+from distutils import log
+
+
+class install_scripts(orig.install_scripts):
+    """Do normal script install, plus any egg_info wrapper scripts"""
+
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
+    def initialize_options(self) -> None:
+        orig.install_scripts.initialize_options(self)
+        self.no_ep = False
+
+    def run(self) -> None:
+        self.run_command("egg_info")
+        if self.distribution.scripts:
+            orig.install_scripts.run(self)  # run first to set up self.outfiles
+        else:
+            self.outfiles: list[str] = []
+        if self.no_ep:
+            # don't install entry point scripts into .egg file!
+            return
+        self._install_ep_scripts()
+
+    def _install_ep_scripts(self):
+        # Delay import side-effects
+        from pkg_resources import Distribution, PathMetadata
+
+        from . import easy_install as ei
+
+        ei_cmd = self.get_finalized_command("egg_info")
+        dist = Distribution(
+            ei_cmd.egg_base,
+            PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+            ei_cmd.egg_name,
+            ei_cmd.egg_version,
+        )
+        bs_cmd = self.get_finalized_command('build_scripts')
+        exec_param = getattr(bs_cmd, 'executable', None)
+        writer = ei.ScriptWriter
+        if exec_param == sys.executable:
+            # In case the path to the Python executable contains a space, wrap
+            # it so it's not split up.
+            exec_param = [exec_param]
+        # resolve the writer to the environment
+        writer = writer.best()
+        cmd = writer.command_spec_class.best().from_param(exec_param)
+        for args in writer.get_args(dist, cmd.as_header()):
+            self.write_script(*args)
+
+    def write_script(self, script_name, contents, mode: str = "t", *ignored) -> None:
+        """Write an executable file to the scripts directory"""
+        from setuptools.command.easy_install import chmod, current_umask
+
+        log.info("Installing %s script to %s", script_name, self.install_dir)
+        target = os.path.join(self.install_dir, script_name)
+        self.outfiles.append(target)
+
+        encoding = None if "b" in mode else "utf-8"
+        mask = current_umask()
+        if not self.dry_run:
+            ensure_directory(target)
+            with open(target, "w" + mode, encoding=encoding) as f:
+                f.write(contents)
+            chmod(target, 0o777 - mask)
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/launcher manifest.xml b/.venv/lib/python3.12/site-packages/setuptools/command/launcher manifest.xml
new file mode 100644
index 00000000..5972a96d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/launcher manifest.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+    <assemblyIdentity version="1.0.0.0"
+                      processorArchitecture="X86"
+                      name="%(name)s"
+                      type="win32"/>
+    <!-- Identify the application security requirements. -->
+    <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+        <security>
+            <requestedPrivileges>
+                <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
+            </requestedPrivileges>
+        </security>
+    </trustInfo>
+</assembly>
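The manifest above is a template rather than a finished file: %(name)s is a printf-style placeholder of the kind filled with Python %-formatting before the manifest is attached to a generated Windows script launcher. A minimal sketch of that substitution (the launcher name is illustrative):

    template = (
        '<assemblyIdentity version="1.0.0.0" processorArchitecture="X86" '
        'name="%(name)s" type="win32"/>'
    )
    print(template % {"name": "my_launcher.exe"})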
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/rotate.py b/.venv/lib/python3.12/site-packages/setuptools/command/rotate.py
new file mode 100644
index 00000000..acdce07b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/rotate.py
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+import os
+from typing import ClassVar
+
+from .. import Command, _shutil
+
+from distutils import log
+from distutils.errors import DistutilsOptionError
+from distutils.util import convert_path
+
+
+class rotate(Command):
+    """Delete older distributions"""
+
+    description = "delete older distributions, keeping N newest files"
+    user_options = [
+        ('match=', 'm', "patterns to match (required)"),
+        ('dist-dir=', 'd', "directory where the distributions are"),
+        ('keep=', 'k', "number of matching distributions to keep"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = []
+
+    def initialize_options(self):
+        self.match = None
+        self.dist_dir = None
+        self.keep = None
+
+    def finalize_options(self) -> None:
+        if self.match is None:
+            raise DistutilsOptionError(
+                "Must specify one or more (comma-separated) match patterns "
+                "(e.g. '.zip' or '.egg')"
+            )
+        if self.keep is None:
+            raise DistutilsOptionError("Must specify number of files to keep")
+        try:
+            self.keep = int(self.keep)
+        except ValueError as e:
+            raise DistutilsOptionError("--keep must be an integer") from e
+        if isinstance(self.match, str):
+            self.match = [convert_path(p.strip()) for p in self.match.split(',')]
+        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+
+    def run(self) -> None:
+        self.run_command("egg_info")
+        from glob import glob
+
+        for pattern in self.match:
+            pattern = self.distribution.get_name() + '*' + pattern
+            files = glob(os.path.join(self.dist_dir, pattern))
+            files = [(os.path.getmtime(f), f) for f in files]
+            files.sort()
+            files.reverse()
+
+            log.info("%d file(s) matching %s", len(files), pattern)
+            files = files[self.keep :]
+            for t, f in files:
+                log.info("Deleting %s", f)
+                if not self.dry_run:
+                    if os.path.isdir(f):
+                        _shutil.rmtree(f)
+                    else:
+                        os.unlink(f)
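rotate.run() above sorts each batch of matching distributions by modification time, newest first, and deletes everything beyond the --keep newest (e.g. python setup.py rotate --match=.egg --keep=2). A standalone sketch of the same selection logic (function and pattern names are illustrative):

    import os
    from glob import glob

    def split_by_age(pattern: str, keep: int):
        # Newest first by modification time, mirroring the sort/reverse above.
        files = sorted(glob(pattern), key=os.path.getmtime, reverse=True)
        return files[:keep], files[keep:]  # (kept, candidates for deletion)

    kept, stale = split_by_age('dist/mypkg-*.egg', keep=2)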
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/saveopts.py b/.venv/lib/python3.12/site-packages/setuptools/command/saveopts.py
new file mode 100644
index 00000000..2a2cbce6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/saveopts.py
@@ -0,0 +1,21 @@
+from setuptools.command.setopt import edit_config, option_base
+
+
+class saveopts(option_base):
+    """Save command-line options to a file"""
+
+    description = "save supplied options to setup.cfg or other config file"
+
+    def run(self) -> None:
+        dist = self.distribution
+        settings: dict[str, dict[str, str]] = {}
+
+        for cmd in dist.command_options:
+            if cmd == 'saveopts':
+                continue  # don't save our own options!
+
+            for opt, (src, val) in dist.get_option_dict(cmd).items():
+                if src == "command line":
+                    settings.setdefault(cmd, {})[opt] = val
+
+        edit_config(self.filename, settings, self.dry_run)
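saveopts keeps only options whose recorded source is the literal string "command line", so a run such as python setup.py egg_info --tag-build=dev saveopts would persist that flag to the chosen config file. The settings argument handed to edit_config is just nested dicts; a minimal sketch of the equivalent direct call (section and value are illustrative, and dry_run=True keeps it from writing anything):

    from setuptools.command.setopt import edit_config

    # Same nested-dict shape that saveopts builds: {command: {option: value}}
    edit_config('setup.cfg', {'egg_info': {'tag_build': 'dev'}}, dry_run=True)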
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/sdist.py b/.venv/lib/python3.12/site-packages/setuptools/command/sdist.py
new file mode 100644
index 00000000..9631cf31
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/sdist.py
@@ -0,0 +1,217 @@
+from __future__ import annotations
+
+import contextlib
+import os
+import re
+from itertools import chain
+from typing import ClassVar
+
+from .._importlib import metadata
+from ..dist import Distribution
+from .build import _ORIGINAL_SUBCOMMANDS
+
+import distutils.command.sdist as orig
+from distutils import log
+
+_default_revctrl = list
+
+
+def walk_revctrl(dirname=''):
+    """Find all files under revision control"""
+    for ep in metadata.entry_points(group='setuptools.file_finders'):
+        yield from ep.load()(dirname)
+
+
+class sdist(orig.sdist):
+    """Smart sdist that finds anything supported by revision control"""
+
+    user_options = [
+        ('formats=', None, "formats for source distribution (comma-separated list)"),
+        (
+            'keep-temp',
+            'k',
+            "keep the distribution tree around after creating " + "archive file(s)",
+        ),
+        (
+            'dist-dir=',
+            'd',
+            "directory to put the source distribution archive(s) in [default: dist]",
+        ),
+        (
+            'owner=',
+            'u',
+            "Owner name used when creating a tar file [default: current user]",
+        ),
+        (
+            'group=',
+            'g',
+            "Group name used when creating a tar file [default: current group]",
+        ),
+    ]
+
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+    negative_opt: ClassVar[dict[str, str]] = {}
+
+    README_EXTENSIONS = ['', '.rst', '.txt', '.md']
+    READMES = tuple(f'README{ext}' for ext in README_EXTENSIONS)
+
+    def run(self) -> None:
+        self.run_command('egg_info')
+        ei_cmd = self.get_finalized_command('egg_info')
+        self.filelist = ei_cmd.filelist
+        self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
+        self.check_readme()
+
+        # Run sub commands
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        self.make_distribution()
+
+        dist_files = getattr(self.distribution, 'dist_files', [])
+        for file in self.archive_files:
+            data = ('sdist', '', file)
+            if data not in dist_files:
+                dist_files.append(data)
+
+    def initialize_options(self) -> None:
+        orig.sdist.initialize_options(self)
+
+    def make_distribution(self) -> None:
+        """
+        Workaround for #516
+        """
+        with self._remove_os_link():
+            orig.sdist.make_distribution(self)
+
+    @staticmethod
+    @contextlib.contextmanager
+    def _remove_os_link():
+        """
+        In a context, remove and restore os.link if it exists
+        """
+
+        class NoValue:
+            pass
+
+        orig_val = getattr(os, 'link', NoValue)
+        try:
+            del os.link
+        except Exception:
+            pass
+        try:
+            yield
+        finally:
+            if orig_val is not NoValue:
+                os.link = orig_val
+
+    def add_defaults(self) -> None:
+        super().add_defaults()
+        self._add_defaults_build_sub_commands()
+
+    def _add_defaults_optional(self):
+        super()._add_defaults_optional()
+        if os.path.isfile('pyproject.toml'):
+            self.filelist.append('pyproject.toml')
+
+    def _add_defaults_python(self):
+        """getting python files"""
+        if self.distribution.has_pure_modules():
+            build_py = self.get_finalized_command('build_py')
+            self.filelist.extend(build_py.get_source_files())
+            self._add_data_files(self._safe_data_files(build_py))
+
+    def _add_defaults_build_sub_commands(self):
+        build = self.get_finalized_command("build")
+        missing_cmds = set(build.get_sub_commands()) - _ORIGINAL_SUBCOMMANDS
+        # ^-- the original built-in sub-commands are already handled by default.
+        cmds = (self.get_finalized_command(c) for c in missing_cmds)
+        files = (c.get_source_files() for c in cmds if hasattr(c, "get_source_files"))
+        self.filelist.extend(chain.from_iterable(files))
+
+    def _safe_data_files(self, build_py):
+        """
+        Since the ``sdist`` class is also used to compute the MANIFEST
+        (via :obj:`setuptools.command.egg_info.manifest_maker`),
+        there might be recursion problems when trying to obtain the list of
+        data_files and ``include_package_data=True`` (which in turn depends on
+        the files included in the MANIFEST).
+
+        To avoid that, ``manifest_maker`` should be able to overwrite this
+        method and avoid recursive attempts to build/analyze the MANIFEST.
+        """
+        return build_py.data_files
+
+    def _add_data_files(self, data_files):
+        """
+        Add data files as found in build_py.data_files.
+        """
+        self.filelist.extend(
+            os.path.join(src_dir, name)
+            for _, src_dir, _, filenames in data_files
+            for name in filenames
+        )
+
+    def _add_defaults_data_files(self):
+        try:
+            super()._add_defaults_data_files()
+        except TypeError:
+            log.warn("data_files contains unexpected objects")
+
+    def prune_file_list(self) -> None:
+        super().prune_file_list()
+        # Prevent accidental inclusion of test-related cache dirs at the project root
+        sep = re.escape(os.sep)
+        self.filelist.exclude_pattern(r"^(\.tox|\.nox|\.venv)" + sep, is_regex=True)
+
+    def check_readme(self) -> None:
+        for f in self.READMES:
+            if os.path.exists(f):
+                return
+        else:
+            self.warn(
+                "standard file not found: should have one of " + ', '.join(self.READMES)
+            )
+
+    def make_release_tree(self, base_dir, files) -> None:
+        orig.sdist.make_release_tree(self, base_dir, files)
+
+        # Save any egg_info command line options used to create this sdist
+        dest = os.path.join(base_dir, 'setup.cfg')
+        if hasattr(os, 'link') and os.path.exists(dest):
+            # unlink and re-copy, since it might be hard-linked, and
+            # we don't want to change the source version
+            os.unlink(dest)
+            self.copy_file('setup.cfg', dest)
+
+        self.get_finalized_command('egg_info').save_version_info(dest)
+
+    def _manifest_is_not_generated(self):
+        # check for special comment used in 2.7.1 and higher
+        if not os.path.isfile(self.manifest):
+            return False
+
+        with open(self.manifest, 'rb') as fp:
+            first_line = fp.readline()
+        return first_line != b'# file GENERATED by distutils, do NOT edit\n'
+
+    def read_manifest(self):
+        """Read the manifest file (named by 'self.manifest') and use it to
+        fill in 'self.filelist', the list of files to include in the source
+        distribution.
+        """
+        log.info("reading manifest file '%s'", self.manifest)
+        manifest = open(self.manifest, 'rb')
+        for bytes_line in manifest:
+            # The manifest must contain UTF-8. See #303.
+            try:
+                line = bytes_line.decode('UTF-8')
+            except UnicodeDecodeError:
+                log.warn(f"{bytes_line!r} not UTF-8 decodable -- skipping")
+                continue
+            # ignore comments and blank lines
+            line = line.strip()
+            if line.startswith('#') or not line:
+                continue
+            self.filelist.append(line)
+        manifest.close()
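walk_revctrl above discovers plugin-provided file finders through the setuptools.file_finders entry-point group; this is how plugins such as setuptools-scm feed version-controlled files into the sdist file list. A minimal sketch of the same lookup done directly with importlib.metadata (output depends on which plugins are installed):

    from importlib import metadata

    for ep in metadata.entry_points(group='setuptools.file_finders'):
        finder = ep.load()         # a callable that takes a directory name
        for path in finder(''):    # yields paths known to revision control
            print(ep.name, path)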
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/setopt.py b/.venv/lib/python3.12/site-packages/setuptools/command/setopt.py
new file mode 100644
index 00000000..678a0593
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/setopt.py
@@ -0,0 +1,141 @@
+import configparser
+import os
+
+from .. import Command
+from ..unicode_utils import _cfg_read_utf8_with_fallback
+
+import distutils
+from distutils import log
+from distutils.errors import DistutilsOptionError
+from distutils.util import convert_path
+
+__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
+
+
+def config_file(kind="local"):
+    """Get the filename of the distutils, local, global, or per-user config
+
+    `kind` must be one of "local", "global", or "user"
+    """
+    if kind == 'local':
+        return 'setup.cfg'
+    if kind == 'global':
+        return os.path.join(os.path.dirname(distutils.__file__), 'distutils.cfg')
+    if kind == 'user':
+        dot = '.' if os.name == 'posix' else ''
+        return os.path.expanduser(convert_path(f"~/{dot}pydistutils.cfg"))
+    raise ValueError("config_file() type must be 'local', 'global', or 'user'", kind)
+
+
+def edit_config(filename, settings, dry_run=False):
+    """Edit a configuration file to include `settings`
+
+    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
+    command/section name.  A ``None`` value means to delete the entire section,
+    while a dictionary lists settings to be changed or deleted in that section.
+    A setting of ``None`` means to delete that setting.
+    """
+    log.debug("Reading configuration from %s", filename)
+    opts = configparser.RawConfigParser()
+    opts.optionxform = lambda optionstr: optionstr  # type: ignore[method-assign] # overriding method
+    _cfg_read_utf8_with_fallback(opts, filename)
+
+    for section, options in settings.items():
+        if options is None:
+            log.info("Deleting section [%s] from %s", section, filename)
+            opts.remove_section(section)
+        else:
+            if not opts.has_section(section):
+                log.debug("Adding new section [%s] to %s", section, filename)
+                opts.add_section(section)
+            for option, value in options.items():
+                if value is None:
+                    log.debug("Deleting %s.%s from %s", section, option, filename)
+                    opts.remove_option(section, option)
+                    if not opts.options(section):
+                        log.info(
+                            "Deleting empty [%s] section from %s", section, filename
+                        )
+                        opts.remove_section(section)
+                else:
+                    log.debug(
+                        "Setting %s.%s to %r in %s", section, option, value, filename
+                    )
+                    opts.set(section, option, value)
+
+    log.info("Writing %s", filename)
+    if not dry_run:
+        with open(filename, 'w', encoding="utf-8") as f:
+            opts.write(f)
+
+
+class option_base(Command):
+    """Abstract base class for commands that mess with config files"""
+
+    user_options = [
+        ('global-config', 'g', "save options to the site-wide distutils.cfg file"),
+        ('user-config', 'u', "save options to the current user's pydistutils.cfg file"),
+        ('filename=', 'f', "configuration file to use (default=setup.cfg)"),
+    ]
+
+    boolean_options = [
+        'global-config',
+        'user-config',
+    ]
+
+    def initialize_options(self):
+        self.global_config = None
+        self.user_config = None
+        self.filename = None
+
+    def finalize_options(self):
+        filenames = []
+        if self.global_config:
+            filenames.append(config_file('global'))
+        if self.user_config:
+            filenames.append(config_file('user'))
+        if self.filename is not None:
+            filenames.append(self.filename)
+        if not filenames:
+            filenames.append(config_file('local'))
+        if len(filenames) > 1:
+            raise DistutilsOptionError(
+                "Must specify only one configuration file option", filenames
+            )
+        (self.filename,) = filenames
+
+
+class setopt(option_base):
+    """Save command-line options to a file"""
+
+    description = "set an option in setup.cfg or another config file"
+
+    user_options = [
+        ('command=', 'c', 'command to set an option for'),
+        ('option=', 'o', 'option to set'),
+        ('set-value=', 's', 'value of the option'),
+        ('remove', 'r', 'remove (unset) the value'),
+    ] + option_base.user_options
+
+    boolean_options = option_base.boolean_options + ['remove']
+
+    def initialize_options(self):
+        option_base.initialize_options(self)
+        self.command = None
+        self.option = None
+        self.set_value = None
+        self.remove = None
+
+    def finalize_options(self) -> None:
+        option_base.finalize_options(self)
+        if self.command is None or self.option is None:
+            raise DistutilsOptionError("Must specify --command *and* --option")
+        if self.set_value is None and not self.remove:
+            raise DistutilsOptionError("Must specify --set-value or --remove")
+
+    def run(self) -> None:
+        edit_config(
+            self.filename,
+            {self.command: {self.option.replace('-', '_'): self.set_value}},
+            self.dry_run,
+        )
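setopt.run() above is a thin wrapper over edit_config: python setup.py setopt --command=metadata --option=description --set-value=demo should be equivalent to the first call below, and --remove maps to a None value (which also drops the section once it is empty). The section and option names here are illustrative:

    from setuptools.command.setopt import edit_config

    edit_config('setup.cfg', {'metadata': {'description': 'demo'}})
    edit_config('setup.cfg', {'metadata': {'description': None}})  # remove it again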
diff --git a/.venv/lib/python3.12/site-packages/setuptools/command/test.py b/.venv/lib/python3.12/site-packages/setuptools/command/test.py
new file mode 100644
index 00000000..341b11a2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/command/test.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+from setuptools import Command
+from setuptools.warnings import SetuptoolsDeprecationWarning
+
+
+# Would restrict to Literal["test"], but mypy doesn't support it: https://github.com/python/mypy/issues/8203
+def __getattr__(name: str) -> type[_test]:
+    if name == 'test':
+        SetuptoolsDeprecationWarning.emit(
+            "The test command is disabled and references to it are deprecated.",
+            "Please remove any references to `setuptools.command.test` in all "
+            "supported versions of the affected package.",
+            due_date=(2024, 11, 15),
+            stacklevel=2,
+        )
+        return _test
+    raise AttributeError(name)
+
+
+class _test(Command):
+    """
+    Stub to warn when test command is referenced or used.
+    """
+
+    description = "stub for old test command (do not use)"
+
+    user_options = [
+        ('test-module=', 'm', "Run 'test_suite' in specified module"),
+        (
+            'test-suite=',
+            's',
+            "Run single test, case or suite (e.g. 'module.test_suite')",
+        ),
+        ('test-runner=', 'r', "Test runner to use"),
+    ]
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        raise RuntimeError("Support for the test command was removed in Setuptools 72")
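The module-level __getattr__ above relies on PEP 562: it is called only for names that normal module lookup cannot find, so from setuptools.command.test import test still resolves, but now emits the deprecation warning first. A minimal standalone sketch of the same pattern (names are illustrative):

    import warnings

    def _replacement() -> str:
        return "use the new API instead"

    def __getattr__(name: str):
        # Invoked only when `name` is not found through normal module lookup.
        if name == 'legacy_helper':
            warnings.warn("legacy_helper is deprecated", DeprecationWarning, stacklevel=2)
            return _replacement
        raise AttributeError(name)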