path: root/.venv/lib/python3.12/site-packages/setuptools/_distutils/command
Diffstat (limited to '.venv/lib/python3.12/site-packages/setuptools/_distutils/command')
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/__init__.py  23
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/_framework_compat.py  54
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist.py  167
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist_dumb.py  141
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist_rpm.py  598
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/build.py  156
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_clib.py  201
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_ext.py  812
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_py.py  407
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_scripts.py  160
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/check.py  152
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/clean.py  77
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/config.py  360
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/install.py  805
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_data.py  94
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_egg_info.py  91
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_headers.py  46
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_lib.py  238
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_scripts.py  62
-rw-r--r--  .venv/lib/python3.12/site-packages/setuptools/_distutils/command/sdist.py  521
20 files changed, 5165 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/__init__.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/__init__.py
new file mode 100644
index 00000000..0f8a1692
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/__init__.py
@@ -0,0 +1,23 @@
+"""distutils.command
+
+Package containing implementation of all the standard Distutils
+commands."""
+
+__all__ = [
+    'build',
+    'build_py',
+    'build_ext',
+    'build_clib',
+    'build_scripts',
+    'clean',
+    'install',
+    'install_lib',
+    'install_headers',
+    'install_scripts',
+    'install_data',
+    'sdist',
+    'bdist',
+    'bdist_dumb',
+    'bdist_rpm',
+    'check',
+]
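Note (not part of the patch): the distribution machinery resolves each of these names by importing distutils.command.<name> and fetching the class of the same name from that module. A minimal sketch of that lookup, using 'build_py' as an arbitrary example and assuming the distutils package is importable (on Python 3.12 it comes from the setuptools shim):

import importlib

def load_command_class(name):
    # Roughly what Distribution.get_command_class() does for the default
    # command package: import the module, then fetch the class that shares
    # the command's name.
    module = importlib.import_module(f'distutils.command.{name}')
    return getattr(module, name)

print(load_command_class('build_py'))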
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/_framework_compat.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/_framework_compat.py
new file mode 100644
index 00000000..00d34bc7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/_framework_compat.py
@@ -0,0 +1,54 @@
+"""
+Backward compatibility for homebrew builds on macOS.
+"""
+
+import functools
+import os
+import subprocess
+import sys
+import sysconfig
+
+
+@functools.lru_cache
+def enabled():
+    """
+    Only enabled for Python 3.9 framework homebrew builds
+    except ensurepip and venv.
+    """
+    PY39 = (3, 9) < sys.version_info < (3, 10)
+    framework = sys.platform == 'darwin' and sys._framework
+    homebrew = "Cellar" in sysconfig.get_config_var('projectbase')
+    venv = sys.prefix != sys.base_prefix
+    ensurepip = os.environ.get("ENSUREPIP_OPTIONS")
+    return PY39 and framework and homebrew and not venv and not ensurepip
+
+
+schemes = dict(
+    osx_framework_library=dict(
+        stdlib='{installed_base}/{platlibdir}/python{py_version_short}',
+        platstdlib='{platbase}/{platlibdir}/python{py_version_short}',
+        purelib='{homebrew_prefix}/lib/python{py_version_short}/site-packages',
+        platlib='{homebrew_prefix}/{platlibdir}/python{py_version_short}/site-packages',
+        include='{installed_base}/include/python{py_version_short}{abiflags}',
+        platinclude='{installed_platbase}/include/python{py_version_short}{abiflags}',
+        scripts='{homebrew_prefix}/bin',
+        data='{homebrew_prefix}',
+    )
+)
+
+
+@functools.lru_cache
+def vars():
+    if not enabled():
+        return {}
+    homebrew_prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip()
+    return locals()
+
+
+def scheme(name):
+    """
+    Override the selected scheme for posix_prefix.
+    """
+    if not enabled() or not name.endswith('_prefix'):
+        return name
+    return 'osx_framework_library'
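Illustration (not part of the patch): how the three helpers above fit together when a caller resolves an install scheme. The resolve() wrapper is hypothetical; only scheme(), vars() and schemes come from the module, and the import assumes the setuptools distutils shim.

from distutils.command import _framework_compat as fw

def resolve(name='posix_prefix'):
    # scheme() rewrites '*_prefix' names to 'osx_framework_library' only when
    # enabled() detects a Python 3.9 Homebrew framework build.
    selected = fw.scheme(name)
    # vars() contributes 'homebrew_prefix' for template expansion, or {} otherwise.
    extra_vars = fw.vars()
    # Path templates exist only for the framework scheme.
    templates = fw.schemes.get(selected)
    return selected, templates, extra_vars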
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist.py
new file mode 100644
index 00000000..07811aab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist.py
@@ -0,0 +1,167 @@
+"""distutils.command.bdist
+
+Implements the Distutils 'bdist' command (create a built [binary]
+distribution)."""
+
+from __future__ import annotations
+
+import os
+import warnings
+from collections.abc import Callable
+from typing import TYPE_CHECKING, ClassVar
+
+from ..core import Command
+from ..errors import DistutilsOptionError, DistutilsPlatformError
+from ..util import get_platform
+
+if TYPE_CHECKING:
+    from typing_extensions import deprecated
+else:
+
+    def deprecated(message):
+        return lambda fn: fn
+
+
+def show_formats():
+    """Print list of available formats (arguments to "--format" option)."""
+    from ..fancy_getopt import FancyGetopt
+
+    formats = [
+        ("formats=" + format, None, bdist.format_commands[format][1])
+        for format in bdist.format_commands
+    ]
+    pretty_printer = FancyGetopt(formats)
+    pretty_printer.print_help("List of available distribution formats:")
+
+
+class ListCompat(dict[str, tuple[str, str]]):
+    # adapter to allow for Setuptools compatibility in format_commands
+    @deprecated("format_commands is now a dict. append is deprecated.")
+    def append(self, item: object) -> None:
+        warnings.warn(
+            "format_commands is now a dict. append is deprecated.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+
+class bdist(Command):
+    description = "create a built (binary) distribution"
+
+    user_options = [
+        ('bdist-base=', 'b', "temporary directory for creating built distributions"),
+        (
+            'plat-name=',
+            'p',
+            "platform name to embed in generated filenames "
+            f"[default: {get_platform()}]",
+        ),
+        ('formats=', None, "formats for distribution (comma-separated list)"),
+        (
+            'dist-dir=',
+            'd',
+            "directory to put final built distributions in [default: dist]",
+        ),
+        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
+        (
+            'owner=',
+            'u',
+            "Owner name used when creating a tar file [default: current user]",
+        ),
+        (
+            'group=',
+            'g',
+            "Group name used when creating a tar file [default: current group]",
+        ),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['skip-build']
+
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
+        ('help-formats', None, "lists available distribution formats", show_formats),
+    ]
+
+    # The following commands do not take a format option from bdist
+    no_format_option: ClassVar[tuple[str, ...]] = ('bdist_rpm',)
+
+    # This won't do in reality: will need to distinguish RPM-ish Linux,
+    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
+    default_format: ClassVar[dict[str, str]] = {'posix': 'gztar', 'nt': 'zip'}
+
+    # Define commands in preferred order for the --help-formats option
+    format_commands = ListCompat({
+        'rpm': ('bdist_rpm', "RPM distribution"),
+        'gztar': ('bdist_dumb', "gzip'ed tar file"),
+        'bztar': ('bdist_dumb', "bzip2'ed tar file"),
+        'xztar': ('bdist_dumb', "xz'ed tar file"),
+        'ztar': ('bdist_dumb', "compressed tar file"),
+        'tar': ('bdist_dumb', "tar file"),
+        'zip': ('bdist_dumb', "ZIP file"),
+    })
+
+    # for compatibility until consumers only reference format_commands
+    format_command = format_commands
+
+    def initialize_options(self):
+        self.bdist_base = None
+        self.plat_name = None
+        self.formats = None
+        self.dist_dir = None
+        self.skip_build = False
+        self.group = None
+        self.owner = None
+
+    def finalize_options(self) -> None:
+        # have to finalize 'plat_name' before 'bdist_base'
+        if self.plat_name is None:
+            if self.skip_build:
+                self.plat_name = get_platform()
+            else:
+                self.plat_name = self.get_finalized_command('build').plat_name
+
+        # 'bdist_base' -- parent of per-built-distribution-format
+        # temporary directories (eg. we'll probably have
+        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
+        if self.bdist_base is None:
+            build_base = self.get_finalized_command('build').build_base
+            self.bdist_base = os.path.join(build_base, 'bdist.' + self.plat_name)
+
+        self.ensure_string_list('formats')
+        if self.formats is None:
+            try:
+                self.formats = [self.default_format[os.name]]
+            except KeyError:
+                raise DistutilsPlatformError(
+                    "don't know how to create built distributions "
+                    f"on platform {os.name}"
+                )
+
+        if self.dist_dir is None:
+            self.dist_dir = "dist"
+
+    def run(self) -> None:
+        # Figure out which sub-commands we need to run.
+        commands = []
+        for format in self.formats:
+            try:
+                commands.append(self.format_commands[format][0])
+            except KeyError:
+                raise DistutilsOptionError(f"invalid format '{format}'")
+
+        # Reinitialize and run each command.
+        for i in range(len(self.formats)):
+            cmd_name = commands[i]
+            sub_cmd = self.reinitialize_command(cmd_name)
+            if cmd_name not in self.no_format_option:
+                sub_cmd.format = self.formats[i]
+
+            # passing the owner and group names for tar archiving
+            if cmd_name == 'bdist_dumb':
+                sub_cmd.owner = self.owner
+                sub_cmd.group = self.group
+
+            # If we're going to need to run this command again, tell it to
+            # keep its temporary files around so subsequent runs go faster.
+            if cmd_name in commands[i + 1 :]:
+                sub_cmd.keep_temp = True
+            self.run_command(cmd_name)
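For reference (not part of the patch): the format_commands table above is what turns a request such as "python setup.py bdist --formats=gztar,zip" into sub-command runs. A small sketch of the dispatch, assuming the distutils shim is importable:

from distutils.command.bdist import bdist

requested = ['gztar', 'zip']
for fmt in requested:
    cmd_name, description = bdist.format_commands[fmt]
    # Both formats map to bdist_dumb; bdist.run() forwards the format to it.
    print(f"{fmt} -> {cmd_name} ({description})")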
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist_dumb.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist_dumb.py
new file mode 100644
index 00000000..ccad66f4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist_dumb.py
@@ -0,0 +1,141 @@
+"""distutils.command.bdist_dumb
+
+Implements the Distutils 'bdist_dumb' command (create a "dumb" built
+distribution -- i.e., just an archive to be unpacked under $prefix or
+$exec_prefix)."""
+
+import os
+from distutils._log import log
+from typing import ClassVar
+
+from ..core import Command
+from ..dir_util import ensure_relative, remove_tree
+from ..errors import DistutilsPlatformError
+from ..sysconfig import get_python_version
+from ..util import get_platform
+
+
+class bdist_dumb(Command):
+    description = "create a \"dumb\" built distribution"
+
+    user_options = [
+        ('bdist-dir=', 'd', "temporary directory for creating the distribution"),
+        (
+            'plat-name=',
+            'p',
+            "platform name to embed in generated filenames "
+            f"[default: {get_platform()}]",
+        ),
+        (
+            'format=',
+            'f',
+            "archive format to create (tar, gztar, bztar, xztar, ztar, zip)",
+        ),
+        (
+            'keep-temp',
+            'k',
+            "keep the pseudo-installation tree around after creating the distribution archive",
+        ),
+        ('dist-dir=', 'd', "directory to put final built distributions in"),
+        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
+        (
+            'relative',
+            None,
+            "build the archive using relative paths [default: false]",
+        ),
+        (
+            'owner=',
+            'u',
+            "Owner name used when creating a tar file [default: current user]",
+        ),
+        (
+            'group=',
+            'g',
+            "Group name used when creating a tar file [default: current group]",
+        ),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['keep-temp', 'skip-build', 'relative']
+
+    default_format = {'posix': 'gztar', 'nt': 'zip'}
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.format = None
+        self.keep_temp = False
+        self.dist_dir = None
+        self.skip_build = None
+        self.relative = False
+        self.owner = None
+        self.group = None
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'dumb')
+
+        if self.format is None:
+            try:
+                self.format = self.default_format[os.name]
+            except KeyError:
+                raise DistutilsPlatformError(
+                    "don't know how to create dumb built distributions "
+                    f"on platform {os.name}"
+                )
+
+        self.set_undefined_options(
+            'bdist',
+            ('dist_dir', 'dist_dir'),
+            ('plat_name', 'plat_name'),
+            ('skip_build', 'skip_build'),
+        )
+
+    def run(self):
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.reinitialize_command('install', reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        log.info("installing to %s", self.bdist_dir)
+        self.run_command('install')
+
+        # And make an archive relative to the root of the
+        # pseudo-installation tree.
+        archive_basename = f"{self.distribution.get_fullname()}.{self.plat_name}"
+
+        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            if self.distribution.has_ext_modules() and (
+                install.install_base != install.install_platbase
+            ):
+                raise DistutilsPlatformError(
+                    "can't make a dumb built distribution where "
+                    f"base and platbase are different ({install.install_base!r}, {install.install_platbase!r})"
+                )
+            else:
+                archive_root = os.path.join(
+                    self.bdist_dir, ensure_relative(install.install_base)
+                )
+
+        # Make the archive
+        filename = self.make_archive(
+            pseudoinstall_root,
+            self.format,
+            root_dir=archive_root,
+            owner=self.owner,
+            group=self.group,
+        )
+        if self.distribution.has_ext_modules():
+            pyversion = get_python_version()
+        else:
+            pyversion = 'any'
+        self.distribution.dist_files.append(('bdist_dumb', pyversion, filename))
+
+        if not self.keep_temp:
+            remove_tree(self.bdist_dir, dry_run=self.dry_run)
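Illustration (not part of the patch): the archive name assembled in run() above. The project name, version and platform string are assumptions.

fullname = 'example-1.0'      # self.distribution.get_fullname() (assumed)
plat_name = 'linux-x86_64'    # plat_name inherited from bdist / get_platform() (assumed)
archive_basename = f"{fullname}.{plat_name}"
# With the default posix format 'gztar' the archive is written as
#   dist/example-1.0.linux-x86_64.tar.gz
print(archive_basename)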
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist_rpm.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist_rpm.py
new file mode 100644
index 00000000..357b4e86
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/bdist_rpm.py
@@ -0,0 +1,598 @@
+"""distutils.command.bdist_rpm
+
+Implements the Distutils 'bdist_rpm' command (create RPM source and binary
+distributions)."""
+
+import os
+import subprocess
+import sys
+from distutils._log import log
+from typing import ClassVar
+
+from ..core import Command
+from ..debug import DEBUG
+from ..errors import (
+    DistutilsExecError,
+    DistutilsFileError,
+    DistutilsOptionError,
+    DistutilsPlatformError,
+)
+from ..file_util import write_file
+from ..sysconfig import get_python_version
+
+
+class bdist_rpm(Command):
+    description = "create an RPM distribution"
+
+    user_options = [
+        ('bdist-base=', None, "base directory for creating built distributions"),
+        (
+            'rpm-base=',
+            None,
+            "base directory for creating RPMs (defaults to \"rpm\" under "
+            "--bdist-base; must be specified for RPM 2)",
+        ),
+        (
+            'dist-dir=',
+            'd',
+            "directory to put final RPM files in (and .spec files if --spec-only)",
+        ),
+        (
+            'python=',
+            None,
+            "path to Python interpreter to hard-code in the .spec file "
+            "[default: \"python\"]",
+        ),
+        (
+            'fix-python',
+            None,
+            "hard-code the exact path to the current Python interpreter in "
+            "the .spec file",
+        ),
+        ('spec-only', None, "only regenerate spec file"),
+        ('source-only', None, "only generate source RPM"),
+        ('binary-only', None, "only generate binary RPM"),
+        ('use-bzip2', None, "use bzip2 instead of gzip to create source distribution"),
+        # More meta-data: too RPM-specific to put in the setup script,
+        # but needs to go in the .spec file -- so we make these options
+        # to "bdist_rpm".  The idea is that packagers would put this
+        # info in setup.cfg, although they are of course free to
+        # supply it on the command line.
+        (
+            'distribution-name=',
+            None,
+            "name of the (Linux) distribution to which this "
+            "RPM applies (*not* the name of the module distribution!)",
+        ),
+        ('group=', None, "package classification [default: \"Development/Libraries\"]"),
+        ('release=', None, "RPM release number"),
+        ('serial=', None, "RPM serial number"),
+        (
+            'vendor=',
+            None,
+            "RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
+            "[default: maintainer or author from setup script]",
+        ),
+        (
+            'packager=',
+            None,
+            "RPM packager (eg. \"Jane Doe <jane@example.net>\") [default: vendor]",
+        ),
+        ('doc-files=', None, "list of documentation files (space or comma-separated)"),
+        ('changelog=', None, "RPM changelog"),
+        ('icon=', None, "name of icon file"),
+        ('provides=', None, "capabilities provided by this package"),
+        ('requires=', None, "capabilities required by this package"),
+        ('conflicts=', None, "capabilities which conflict with this package"),
+        ('build-requires=', None, "capabilities required to build this package"),
+        ('obsoletes=', None, "capabilities made obsolete by this package"),
+        ('no-autoreq', None, "do not automatically calculate dependencies"),
+        # Actions to take when building RPM
+        ('keep-temp', 'k', "don't clean up RPM build directory"),
+        ('no-keep-temp', None, "clean up RPM build directory [default]"),
+        (
+            'use-rpm-opt-flags',
+            None,
+            "compile with RPM_OPT_FLAGS when building from source RPM",
+        ),
+        ('no-rpm-opt-flags', None, "do not pass any RPM CFLAGS to compiler"),
+        ('rpm3-mode', None, "RPM 3 compatibility mode (default)"),
+        ('rpm2-mode', None, "RPM 2 compatibility mode"),
+        # Add the hooks necessary for specifying custom scripts
+        ('prep-script=', None, "Specify a script for the PREP phase of RPM building"),
+        ('build-script=', None, "Specify a script for the BUILD phase of RPM building"),
+        (
+            'pre-install=',
+            None,
+            "Specify a script for the pre-INSTALL phase of RPM building",
+        ),
+        (
+            'install-script=',
+            None,
+            "Specify a script for the INSTALL phase of RPM building",
+        ),
+        (
+            'post-install=',
+            None,
+            "Specify a script for the post-INSTALL phase of RPM building",
+        ),
+        (
+            'pre-uninstall=',
+            None,
+            "Specify a script for the pre-UNINSTALL phase of RPM building",
+        ),
+        (
+            'post-uninstall=',
+            None,
+            "Specify a script for the post-UNINSTALL phase of RPM building",
+        ),
+        ('clean-script=', None, "Specify a script for the CLEAN phase of RPM building"),
+        (
+            'verify-script=',
+            None,
+            "Specify a script for the VERIFY phase of the RPM build",
+        ),
+        # Allow a packager to explicitly force an architecture
+        ('force-arch=', None, "Force an architecture onto the RPM build process"),
+        ('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = [
+        'keep-temp',
+        'use-rpm-opt-flags',
+        'rpm3-mode',
+        'no-autoreq',
+        'quiet',
+    ]
+
+    negative_opt: ClassVar[dict[str, str]] = {
+        'no-keep-temp': 'keep-temp',
+        'no-rpm-opt-flags': 'use-rpm-opt-flags',
+        'rpm2-mode': 'rpm3-mode',
+    }
+
+    def initialize_options(self):
+        self.bdist_base = None
+        self.rpm_base = None
+        self.dist_dir = None
+        self.python = None
+        self.fix_python = None
+        self.spec_only = None
+        self.binary_only = None
+        self.source_only = None
+        self.use_bzip2 = None
+
+        self.distribution_name = None
+        self.group = None
+        self.release = None
+        self.serial = None
+        self.vendor = None
+        self.packager = None
+        self.doc_files = None
+        self.changelog = None
+        self.icon = None
+
+        self.prep_script = None
+        self.build_script = None
+        self.install_script = None
+        self.clean_script = None
+        self.verify_script = None
+        self.pre_install = None
+        self.post_install = None
+        self.pre_uninstall = None
+        self.post_uninstall = None
+        self.prep = None
+        self.provides = None
+        self.requires = None
+        self.conflicts = None
+        self.build_requires = None
+        self.obsoletes = None
+
+        self.keep_temp = False
+        self.use_rpm_opt_flags = True
+        self.rpm3_mode = True
+        self.no_autoreq = False
+
+        self.force_arch = None
+        self.quiet = False
+
+    def finalize_options(self) -> None:
+        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
+        if self.rpm_base is None:
+            if not self.rpm3_mode:
+                raise DistutilsOptionError("you must specify --rpm-base in RPM 2 mode")
+            self.rpm_base = os.path.join(self.bdist_base, "rpm")
+
+        if self.python is None:
+            if self.fix_python:
+                self.python = sys.executable
+            else:
+                self.python = "python3"
+        elif self.fix_python:
+            raise DistutilsOptionError(
+                "--python and --fix-python are mutually exclusive options"
+            )
+
+        if os.name != 'posix':
+            raise DistutilsPlatformError(
+                f"don't know how to create RPM distributions on platform {os.name}"
+            )
+        if self.binary_only and self.source_only:
+            raise DistutilsOptionError(
+                "cannot supply both '--source-only' and '--binary-only'"
+            )
+
+        # don't pass CFLAGS to pure python distributions
+        if not self.distribution.has_ext_modules():
+            self.use_rpm_opt_flags = False
+
+        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+        self.finalize_package_data()
+
+    def finalize_package_data(self) -> None:
+        self.ensure_string('group', "Development/Libraries")
+        self.ensure_string(
+            'vendor',
+            f"{self.distribution.get_contact()} <{self.distribution.get_contact_email()}>",
+        )
+        self.ensure_string('packager')
+        self.ensure_string_list('doc_files')
+        if isinstance(self.doc_files, list):
+            for readme in ('README', 'README.txt'):
+                if os.path.exists(readme) and readme not in self.doc_files:
+                    self.doc_files.append(readme)
+
+        self.ensure_string('release', "1")
+        self.ensure_string('serial')  # should it be an int?
+
+        self.ensure_string('distribution_name')
+
+        self.ensure_string('changelog')
+        # Format changelog correctly
+        self.changelog = self._format_changelog(self.changelog)
+
+        self.ensure_filename('icon')
+
+        self.ensure_filename('prep_script')
+        self.ensure_filename('build_script')
+        self.ensure_filename('install_script')
+        self.ensure_filename('clean_script')
+        self.ensure_filename('verify_script')
+        self.ensure_filename('pre_install')
+        self.ensure_filename('post_install')
+        self.ensure_filename('pre_uninstall')
+        self.ensure_filename('post_uninstall')
+
+        # XXX don't forget we punted on summaries and descriptions -- they
+        # should be handled here eventually!
+
+        # Now *this* is some meta-data that belongs in the setup script...
+        self.ensure_string_list('provides')
+        self.ensure_string_list('requires')
+        self.ensure_string_list('conflicts')
+        self.ensure_string_list('build_requires')
+        self.ensure_string_list('obsoletes')
+
+        self.ensure_string('force_arch')
+
+    def run(self) -> None:  # noqa: C901
+        if DEBUG:
+            print("before _get_package_data():")
+            print("vendor =", self.vendor)
+            print("packager =", self.packager)
+            print("doc_files =", self.doc_files)
+            print("changelog =", self.changelog)
+
+        # make directories
+        if self.spec_only:
+            spec_dir = self.dist_dir
+            self.mkpath(spec_dir)
+        else:
+            rpm_dir = {}
+            for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
+                rpm_dir[d] = os.path.join(self.rpm_base, d)
+                self.mkpath(rpm_dir[d])
+            spec_dir = rpm_dir['SPECS']
+
+        # Spec file goes into 'dist_dir' if '--spec-only' is specified,
+        # build/rpm.<plat> otherwise.
+        spec_path = os.path.join(spec_dir, f"{self.distribution.get_name()}.spec")
+        self.execute(
+            write_file, (spec_path, self._make_spec_file()), f"writing '{spec_path}'"
+        )
+
+        if self.spec_only:  # stop if requested
+            return
+
+        # Make a source distribution and copy to SOURCES directory with
+        # optional icon.
+        saved_dist_files = self.distribution.dist_files[:]
+        sdist = self.reinitialize_command('sdist')
+        if self.use_bzip2:
+            sdist.formats = ['bztar']
+        else:
+            sdist.formats = ['gztar']
+        self.run_command('sdist')
+        self.distribution.dist_files = saved_dist_files
+
+        source = sdist.get_archive_files()[0]
+        source_dir = rpm_dir['SOURCES']
+        self.copy_file(source, source_dir)
+
+        if self.icon:
+            if os.path.exists(self.icon):
+                self.copy_file(self.icon, source_dir)
+            else:
+                raise DistutilsFileError(f"icon file '{self.icon}' does not exist")
+
+        # build package
+        log.info("building RPMs")
+        rpm_cmd = ['rpmbuild']
+
+        if self.source_only:  # what kind of RPMs?
+            rpm_cmd.append('-bs')
+        elif self.binary_only:
+            rpm_cmd.append('-bb')
+        else:
+            rpm_cmd.append('-ba')
+        rpm_cmd.extend(['--define', f'__python {self.python}'])
+        if self.rpm3_mode:
+            rpm_cmd.extend(['--define', f'_topdir {os.path.abspath(self.rpm_base)}'])
+        if not self.keep_temp:
+            rpm_cmd.append('--clean')
+
+        if self.quiet:
+            rpm_cmd.append('--quiet')
+
+        rpm_cmd.append(spec_path)
+        # Determine the binary rpm names that should be built out of this spec
+        # file
+        # Note that some of these may not be really built (if the file
+        # list is empty)
+        nvr_string = "%{name}-%{version}-%{release}"
+        src_rpm = nvr_string + ".src.rpm"
+        non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
+        q_cmd = rf"rpm -q --qf '{src_rpm} {non_src_rpm}\n' --specfile '{spec_path}'"
+
+        out = os.popen(q_cmd)
+        try:
+            binary_rpms = []
+            source_rpm = None
+            while True:
+                line = out.readline()
+                if not line:
+                    break
+                ell = line.strip().split()
+                assert len(ell) == 2
+                binary_rpms.append(ell[1])
+                # The source rpm is named after the first entry in the spec file
+                if source_rpm is None:
+                    source_rpm = ell[0]
+
+            status = out.close()
+            if status:
+                raise DistutilsExecError(f"Failed to execute: {q_cmd!r}")
+
+        finally:
+            out.close()
+
+        self.spawn(rpm_cmd)
+
+        if not self.dry_run:
+            if self.distribution.has_ext_modules():
+                pyversion = get_python_version()
+            else:
+                pyversion = 'any'
+
+            if not self.binary_only:
+                srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
+                assert os.path.exists(srpm)
+                self.move_file(srpm, self.dist_dir)
+                filename = os.path.join(self.dist_dir, source_rpm)
+                self.distribution.dist_files.append(('bdist_rpm', pyversion, filename))
+
+            if not self.source_only:
+                for rpm in binary_rpms:
+                    rpm = os.path.join(rpm_dir['RPMS'], rpm)
+                    if os.path.exists(rpm):
+                        self.move_file(rpm, self.dist_dir)
+                        filename = os.path.join(self.dist_dir, os.path.basename(rpm))
+                        self.distribution.dist_files.append((
+                            'bdist_rpm',
+                            pyversion,
+                            filename,
+                        ))
+
+    def _dist_path(self, path):
+        return os.path.join(self.dist_dir, os.path.basename(path))
+
+    def _make_spec_file(self):  # noqa: C901
+        """Generate the text of an RPM spec file and return it as a
+        list of strings (one per line).
+        """
+        # definitions and headers
+        spec_file = [
+            '%define name ' + self.distribution.get_name(),
+            '%define version ' + self.distribution.get_version().replace('-', '_'),
+            '%define unmangled_version ' + self.distribution.get_version(),
+            '%define release ' + self.release.replace('-', '_'),
+            '',
+            'Summary: ' + (self.distribution.get_description() or "UNKNOWN"),
+        ]
+
+        # Workaround for #14443 which affects some RPM based systems such as
+        # RHEL6 (and probably derivatives)
+        vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
+        # Generate a potential replacement value for __os_install_post (whilst
+        # normalizing the whitespace to simplify the test for whether the
+        # invocation of brp-python-bytecompile passes in __python):
+        vendor_hook = '\n'.join([
+            f'  {line.strip()} \\' for line in vendor_hook.splitlines()
+        ])
+        problem = "brp-python-bytecompile \\\n"
+        fixed = "brp-python-bytecompile %{__python} \\\n"
+        fixed_hook = vendor_hook.replace(problem, fixed)
+        if fixed_hook != vendor_hook:
+            spec_file.append('# Workaround for https://bugs.python.org/issue14443')
+            spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
+
+        # put locale summaries into spec file
+        # XXX not supported for now (hard to put a dictionary
+        # in a config file -- arg!)
+        # for locale in self.summaries.keys():
+        #    spec_file.append('Summary(%s): %s' % (locale,
+        #                                          self.summaries[locale]))
+
+        spec_file.extend([
+            'Name: %{name}',
+            'Version: %{version}',
+            'Release: %{release}',
+        ])
+
+        # XXX yuck! this filename is available from the "sdist" command,
+        # but only after it has run: and we create the spec file before
+        # running "sdist", in case of --spec-only.
+        if self.use_bzip2:
+            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
+        else:
+            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
+
+        spec_file.extend([
+            'License: ' + (self.distribution.get_license() or "UNKNOWN"),
+            'Group: ' + self.group,
+            'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
+            'Prefix: %{_prefix}',
+        ])
+
+        if not self.force_arch:
+            # noarch if no extension modules
+            if not self.distribution.has_ext_modules():
+                spec_file.append('BuildArch: noarch')
+        else:
+            spec_file.append(f'BuildArch: {self.force_arch}')
+
+        for field in (
+            'Vendor',
+            'Packager',
+            'Provides',
+            'Requires',
+            'Conflicts',
+            'Obsoletes',
+        ):
+            val = getattr(self, field.lower())
+            if isinstance(val, list):
+                spec_file.append('{}: {}'.format(field, ' '.join(val)))
+            elif val is not None:
+                spec_file.append(f'{field}: {val}')
+
+        if self.distribution.get_url():
+            spec_file.append('Url: ' + self.distribution.get_url())
+
+        if self.distribution_name:
+            spec_file.append('Distribution: ' + self.distribution_name)
+
+        if self.build_requires:
+            spec_file.append('BuildRequires: ' + ' '.join(self.build_requires))
+
+        if self.icon:
+            spec_file.append('Icon: ' + os.path.basename(self.icon))
+
+        if self.no_autoreq:
+            spec_file.append('AutoReq: 0')
+
+        spec_file.extend([
+            '',
+            '%description',
+            self.distribution.get_long_description() or "",
+        ])
+
+        # put locale descriptions into spec file
+        # XXX again, suppressed because config file syntax doesn't
+        # easily support this ;-(
+        # for locale in self.descriptions.keys():
+        #    spec_file.extend([
+        #        '',
+        #        '%description -l ' + locale,
+        #        self.descriptions[locale],
+        #        ])
+
+        # rpm scripts
+        # figure out default build script
+        def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}"
+        def_build = f"{def_setup_call} build"
+        if self.use_rpm_opt_flags:
+            def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
+
+        # insert contents of files
+
+        # XXX this is kind of misleading: user-supplied options are files
+        # that we open and interpolate into the spec file, but the defaults
+        # are just text that we drop in as-is.  Hmmm.
+
+        install_cmd = f'{def_setup_call} install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES'
+
+        script_options = [
+            ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
+            ('build', 'build_script', def_build),
+            ('install', 'install_script', install_cmd),
+            ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
+            ('verifyscript', 'verify_script', None),
+            ('pre', 'pre_install', None),
+            ('post', 'post_install', None),
+            ('preun', 'pre_uninstall', None),
+            ('postun', 'post_uninstall', None),
+        ]
+
+        for rpm_opt, attr, default in script_options:
+            # Insert contents of file referred to, if no file is referred to
+            # use 'default' as contents of script
+            val = getattr(self, attr)
+            if val or default:
+                spec_file.extend([
+                    '',
+                    '%' + rpm_opt,
+                ])
+                if val:
+                    with open(val) as f:
+                        spec_file.extend(f.read().split('\n'))
+                else:
+                    spec_file.append(default)
+
+        # files section
+        spec_file.extend([
+            '',
+            '%files -f INSTALLED_FILES',
+            '%defattr(-,root,root)',
+        ])
+
+        if self.doc_files:
+            spec_file.append('%doc ' + ' '.join(self.doc_files))
+
+        if self.changelog:
+            spec_file.extend([
+                '',
+                '%changelog',
+            ])
+            spec_file.extend(self.changelog)
+
+        return spec_file
+
+    def _format_changelog(self, changelog):
+        """Format the changelog correctly and convert it to a list of strings"""
+        if not changelog:
+            return changelog
+        new_changelog = []
+        for line in changelog.strip().split('\n'):
+            line = line.strip()
+            if line[0] == '*':
+                new_changelog.extend(['', line])
+            elif line[0] == '-':
+                new_changelog.append(line)
+            else:
+                new_changelog.append('  ' + line)
+
+        # strip trailing newline inserted by first changelog entry
+        if not new_changelog[0]:
+            del new_changelog[0]
+
+        return new_changelog
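Illustration (not part of the patch): how _format_changelog() above reshapes a changelog string such as the one supplied via the changelog option. The entry text is made up.

raw = (
    "* Mon Jan 01 2024 Jane Doe <jane@example.net>\n"
    "- first release\n"
    "still part of the same entry"
)
# '*' lines get a blank separator inserted before them, '-' lines pass
# through unchanged, anything else is indented by two spaces, and the
# leading blank entry is dropped, giving:
#   ['* Mon Jan 01 2024 Jane Doe <jane@example.net>',
#    '- first release',
#    '  still part of the same entry']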
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build.py
new file mode 100644
index 00000000..6a8303a9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build.py
@@ -0,0 +1,156 @@
+"""distutils.command.build
+
+Implements the Distutils 'build' command."""
+
+from __future__ import annotations
+
+import os
+import sys
+import sysconfig
+from collections.abc import Callable
+from typing import ClassVar
+
+from ..ccompiler import show_compilers
+from ..core import Command
+from ..errors import DistutilsOptionError
+from ..util import get_platform
+
+
+class build(Command):
+    description = "build everything needed to install"
+
+    user_options = [
+        ('build-base=', 'b', "base directory for build library"),
+        ('build-purelib=', None, "build directory for platform-neutral distributions"),
+        ('build-platlib=', None, "build directory for platform-specific distributions"),
+        (
+            'build-lib=',
+            None,
+            "build directory for all distributions (defaults to either build-purelib or build-platlib)",
+        ),
+        ('build-scripts=', None, "build directory for scripts"),
+        ('build-temp=', 't', "temporary build directory"),
+        (
+            'plat-name=',
+            'p',
+            f"platform name to build for, if supported [default: {get_platform()}]",
+        ),
+        ('compiler=', 'c', "specify the compiler type"),
+        ('parallel=', 'j', "number of parallel build jobs"),
+        ('debug', 'g', "compile extensions and libraries with debugging information"),
+        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+        ('executable=', 'e', "specify final destination interpreter path (build.py)"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['debug', 'force']
+
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
+        ('help-compiler', None, "list available compilers", show_compilers),
+    ]
+
+    def initialize_options(self):
+        self.build_base = 'build'
+        # these are decided only after 'build_base' has its final value
+        # (unless overridden by the user or client)
+        self.build_purelib = None
+        self.build_platlib = None
+        self.build_lib = None
+        self.build_temp = None
+        self.build_scripts = None
+        self.compiler = None
+        self.plat_name = None
+        self.debug = None
+        self.force = False
+        self.executable = None
+        self.parallel = None
+
+    def finalize_options(self) -> None:  # noqa: C901
+        if self.plat_name is None:
+            self.plat_name = get_platform()
+        else:
+            # plat-name only supported for windows (other platforms are
+            # supported via ./configure flags, if at all).  Avoid misleading
+            # other platforms.
+            if os.name != 'nt':
+                raise DistutilsOptionError(
+                    "--plat-name only supported on Windows (try "
+                    "using './configure --help' on your platform)"
+                )
+
+        plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}"
+
+        # Python 3.13+ with --disable-gil shouldn't share build directories
+        if sysconfig.get_config_var('Py_GIL_DISABLED'):
+            plat_specifier += 't'
+
+        # Make it so Python 2.x and Python 2.x with --with-pydebug don't
+        # share the same build directories. Doing so confuses the build
+        # process for C modules
+        if hasattr(sys, 'gettotalrefcount'):
+            plat_specifier += '-pydebug'
+
+        # 'build_purelib' and 'build_platlib' just default to 'lib' and
+        # 'lib.<plat>' under the base build directory.  We only use one of
+        # them for a given distribution, though --
+        if self.build_purelib is None:
+            self.build_purelib = os.path.join(self.build_base, 'lib')
+        if self.build_platlib is None:
+            self.build_platlib = os.path.join(self.build_base, 'lib' + plat_specifier)
+
+        # 'build_lib' is the actual directory that we will use for this
+        # particular module distribution -- if user didn't supply it, pick
+        # one of 'build_purelib' or 'build_platlib'.
+        if self.build_lib is None:
+            if self.distribution.has_ext_modules():
+                self.build_lib = self.build_platlib
+            else:
+                self.build_lib = self.build_purelib
+
+        # 'build_temp' -- temporary directory for compiler turds,
+        # "build/temp.<plat>"
+        if self.build_temp is None:
+            self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier)
+        if self.build_scripts is None:
+            self.build_scripts = os.path.join(
+                self.build_base,
+                f'scripts-{sys.version_info.major}.{sys.version_info.minor}',
+            )
+
+        if self.executable is None and sys.executable:
+            self.executable = os.path.normpath(sys.executable)
+
+        if isinstance(self.parallel, str):
+            try:
+                self.parallel = int(self.parallel)
+            except ValueError:
+                raise DistutilsOptionError("parallel should be an integer")
+
+    def run(self) -> None:
+        # Run all relevant sub-commands.  This will be some subset of:
+        #  - build_py      - pure Python modules
+        #  - build_clib    - standalone C libraries
+        #  - build_ext     - Python extensions
+        #  - build_scripts - (Python) scripts
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+    # -- Predicates for the sub-command list ---------------------------
+
+    def has_pure_modules(self):
+        return self.distribution.has_pure_modules()
+
+    def has_c_libraries(self):
+        return self.distribution.has_c_libraries()
+
+    def has_ext_modules(self):
+        return self.distribution.has_ext_modules()
+
+    def has_scripts(self):
+        return self.distribution.has_scripts()
+
+    sub_commands = [
+        ('build_py', has_pure_modules),
+        ('build_clib', has_c_libraries),
+        ('build_ext', has_ext_modules),
+        ('build_scripts', has_scripts),
+    ]
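For reference (not part of the patch): the directory layout that finalize_options() derives from the defaults above. The platform string and cache tag are assumptions for a CPython 3.12 build on linux-x86_64.

plat_specifier = '.linux-x86_64-cpython-312'
layout = {
    'build_purelib': 'build/lib',
    'build_platlib': f'build/lib{plat_specifier}',
    'build_temp': f'build/temp{plat_specifier}',
    'build_scripts': 'build/scripts-3.12',
}
# build_lib points at build_platlib when the distribution has extension
# modules, otherwise at build_purelib; free-threaded builds append 't' to
# the specifier and debug builds append '-pydebug'.
print(layout)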
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_clib.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_clib.py
new file mode 100644
index 00000000..8b65b3d8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_clib.py
@@ -0,0 +1,201 @@
+"""distutils.command.build_clib
+
+Implements the Distutils 'build_clib' command, to build a C/C++ library
+that is included in the module distribution and needed by an extension
+module."""
+
+# XXX this module has *lots* of code ripped-off quite transparently from
+# build_ext.py -- not surprisingly really, as the work required to build
+# a static library from a collection of C source files is not really all
+# that different from what's required to build a shared object file from
+# a collection of C source files.  Nevertheless, I haven't done the
+# necessary refactoring to account for the overlap in code between the
+# two modules, mainly because a number of subtle details changed in the
+# cut 'n paste.  Sigh.
+from __future__ import annotations
+
+import os
+from collections.abc import Callable
+from distutils._log import log
+from typing import ClassVar
+
+from ..ccompiler import new_compiler, show_compilers
+from ..core import Command
+from ..errors import DistutilsSetupError
+from ..sysconfig import customize_compiler
+
+
+class build_clib(Command):
+    description = "build C/C++ libraries used by Python extensions"
+
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
+        ('build-clib=', 'b', "directory to build C/C++ libraries to"),
+        ('build-temp=', 't', "directory to put temporary build by-products"),
+        ('debug', 'g', "compile with debugging information"),
+        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c', "specify the compiler type"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['debug', 'force']
+
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
+        ('help-compiler', None, "list available compilers", show_compilers),
+    ]
+
+    def initialize_options(self):
+        self.build_clib = None
+        self.build_temp = None
+
+        # List of libraries to build
+        self.libraries = None
+
+        # Compilation options for all libraries
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.debug = None
+        self.force = False
+        self.compiler = None
+
+    def finalize_options(self) -> None:
+        # This might be confusing: both build-clib and build-temp default
+        # to build-temp as defined by the "build" command.  This is because
+        # I think that C libraries are really just temporary build
+        # by-products, at least from the point of view of building Python
+        # extensions -- but I want to keep my options open.
+        self.set_undefined_options(
+            'build',
+            ('build_temp', 'build_clib'),
+            ('build_temp', 'build_temp'),
+            ('compiler', 'compiler'),
+            ('debug', 'debug'),
+            ('force', 'force'),
+        )
+
+        self.libraries = self.distribution.libraries
+        if self.libraries:
+            self.check_library_list(self.libraries)
+
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        # XXX same as for build_ext -- what about 'self.define' and
+        # 'self.undef' ?
+
+    def run(self) -> None:
+        if not self.libraries:
+            return
+
+        self.compiler = new_compiler(
+            compiler=self.compiler, dry_run=self.dry_run, force=self.force
+        )
+        customize_compiler(self.compiler)
+
+        if self.include_dirs is not None:
+            self.compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for name, value in self.define:
+                self.compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                self.compiler.undefine_macro(macro)
+
+        self.build_libraries(self.libraries)
+
+    def check_library_list(self, libraries) -> None:
+        """Ensure that the list of libraries is valid.
+
+        `libraries` is presumably provided as a command option 'libraries'.
+        This method checks that it is a list of 2-tuples, where the tuples
+        are (library_name, build_info_dict).
+
+        Raise DistutilsSetupError if the structure is invalid anywhere;
+        just returns otherwise.
+        """
+        if not isinstance(libraries, list):
+            raise DistutilsSetupError("'libraries' option must be a list of tuples")
+
+        for lib in libraries:
+            if not isinstance(lib, tuple) or len(lib) != 2:
+                raise DistutilsSetupError("each element of 'libraries' must be a 2-tuple")
+
+            name, build_info = lib
+
+            if not isinstance(name, str):
+                raise DistutilsSetupError(
+                    "first element of each tuple in 'libraries' "
+                    "must be a string (the library name)"
+                )
+
+            if '/' in name or (os.sep != '/' and os.sep in name):
+                raise DistutilsSetupError(
+                    f"bad library name '{lib[0]}': may not contain directory separators"
+                )
+
+            if not isinstance(build_info, dict):
+                raise DistutilsSetupError(
+                    "second element of each tuple in 'libraries' "
+                    "must be a dictionary (build info)"
+                )
+
+    def get_library_names(self):
+        # Assume the library list is valid -- 'check_library_list()' is
+        # called from 'finalize_options()', so it should be!
+        if not self.libraries:
+            return None
+
+        lib_names = []
+        for lib_name, _build_info in self.libraries:
+            lib_names.append(lib_name)
+        return lib_names
+
+    def get_source_files(self):
+        self.check_library_list(self.libraries)
+        filenames = []
+        for lib_name, build_info in self.libraries:
+            sources = build_info.get('sources')
+            if sources is None or not isinstance(sources, (list, tuple)):
+                raise DistutilsSetupError(
+                    f"in 'libraries' option (library '{lib_name}'), "
+                    "'sources' must be present and must be "
+                    "a list of source filenames"
+                )
+
+            filenames.extend(sources)
+        return filenames
+
+    def build_libraries(self, libraries) -> None:
+        for lib_name, build_info in libraries:
+            sources = build_info.get('sources')
+            if sources is None or not isinstance(sources, (list, tuple)):
+                raise DistutilsSetupError(
+                    f"in 'libraries' option (library '{lib_name}'), "
+                    "'sources' must be present and must be "
+                    "a list of source filenames"
+                )
+            sources = list(sources)
+
+            log.info("building '%s' library", lib_name)
+
+            # First, compile the source code to object files in the library
+            # directory.  (This should probably change to putting object
+            # files in a temporary build directory.)
+            macros = build_info.get('macros')
+            include_dirs = build_info.get('include_dirs')
+            objects = self.compiler.compile(
+                sources,
+                output_dir=self.build_temp,
+                macros=macros,
+                include_dirs=include_dirs,
+                debug=self.debug,
+            )
+
+            # Now "link" the object files together into a static library.
+            # (On Unix at least, this isn't really linking -- it just
+            # builds an archive.  Whatever.)
+            self.compiler.create_static_lib(
+                objects, lib_name, output_dir=self.build_clib, debug=self.debug
+            )
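Illustration (not part of the patch): the 'libraries' value that check_library_list() and build_libraries() expect, as it would be passed to setup(). The library name, sources and paths are hypothetical.

libraries = [
    (
        'sample',                        # library name (no directory separators)
        {
            'sources': ['src/sample.c'],     # required list of C source files
            'include_dirs': ['include'],     # optional per-library include path
            'macros': [('NDEBUG', '1')],     # optional (name, value) macro pairs
        },
    ),
]
# setup(..., libraries=libraries) makes this available to build_clib as
# self.distribution.libraries.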
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_ext.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_ext.py
new file mode 100644
index 00000000..ec45b440
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_ext.py
@@ -0,0 +1,812 @@
+"""distutils.command.build_ext
+
+Implements the Distutils 'build_ext' command, for building extension
+modules (currently limited to C extensions, should accommodate C++
+extensions ASAP)."""
+
+from __future__ import annotations
+
+import contextlib
+import os
+import re
+import sys
+from collections.abc import Callable
+from distutils._log import log
+from site import USER_BASE
+from typing import ClassVar
+
+from .._modified import newer_group
+from ..ccompiler import new_compiler, show_compilers
+from ..core import Command
+from ..errors import (
+    CCompilerError,
+    CompileError,
+    DistutilsError,
+    DistutilsOptionError,
+    DistutilsPlatformError,
+    DistutilsSetupError,
+)
+from ..extension import Extension
+from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
+from ..util import get_platform, is_freethreaded, is_mingw
+
+# An extension name is just a dot-separated list of Python NAMEs (ie.
+# the same as a fully-qualified module name).
+extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
+
+
+class build_ext(Command):
+    description = "build C/C++ extensions (compile/link to build directory)"
+
+    # XXX thoughts on how to deal with complex command-line options like
+    # these, i.e. how to make it so fancy_getopt can suck them off the
+    # command line and make it look like setup.py defined the appropriate
+    # lists of tuples of what-have-you.
+    #   - each command needs a callback to process its command-line options
+    #   - Command.__init__() needs access to its share of the whole
+    #     command line (must ultimately come from
+    #     Distribution.parse_command_line())
+    #   - it then calls the current command class' option-parsing
+    #     callback to deal with weird options like -D, which have to
+    #     parse the option text and churn out some custom data
+    #     structure
+    #   - that data structure (in this case, a list of 2-tuples)
+    #     will then be present in the command object by the time
+    #     we get to finalize_options() (i.e. the constructor
+    #     takes care of both command-line and client options
+    #     in between initialize_options() and finalize_options())
+
+    sep_by = f" (separated by '{os.pathsep}')"
+    user_options = [
+        ('build-lib=', 'b', "directory for compiled extension modules"),
+        ('build-temp=', 't', "directory for temporary files (build by-products)"),
+        (
+            'plat-name=',
+            'p',
+            "platform name to cross-compile for, if supported "
+            f"[default: {get_platform()}]",
+        ),
+        (
+            'inplace',
+            'i',
+            "ignore build-lib and put compiled extensions into the source "
+            "directory alongside your pure Python modules",
+        ),
+        (
+            'include-dirs=',
+            'I',
+            "list of directories to search for header files" + sep_by,
+        ),
+        ('define=', 'D', "C preprocessor macros to define"),
+        ('undef=', 'U', "C preprocessor macros to undefine"),
+        ('libraries=', 'l', "external C libraries to link with"),
+        (
+            'library-dirs=',
+            'L',
+            "directories to search for external C libraries" + sep_by,
+        ),
+        ('rpath=', 'R', "directories to search for shared C libraries at runtime"),
+        ('link-objects=', 'O', "extra explicit link objects to include in the link"),
+        ('debug', 'g', "compile/link with debugging information"),
+        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c', "specify the compiler type"),
+        ('parallel=', 'j', "number of parallel build jobs"),
+        ('swig-cpp', None, "make SWIG create C++ files (default is C)"),
+        ('swig-opts=', None, "list of SWIG command line options"),
+        ('swig=', None, "path to the SWIG executable"),
+        ('user', None, "add user include, library and rpath"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = [
+        'inplace',
+        'debug',
+        'force',
+        'swig-cpp',
+        'user',
+    ]
+
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
+        ('help-compiler', None, "list available compilers", show_compilers),
+    ]
+
+    def initialize_options(self):
+        self.extensions = None
+        self.build_lib = None
+        self.plat_name = None
+        self.build_temp = None
+        self.inplace = False
+        self.package = None
+
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.libraries = None
+        self.library_dirs = None
+        self.rpath = None
+        self.link_objects = None
+        self.debug = None
+        self.force = None
+        self.compiler = None
+        self.swig = None
+        self.swig_cpp = None
+        self.swig_opts = None
+        self.user = None
+        self.parallel = None
+
+    @staticmethod
+    def _python_lib_dir(sysconfig):
+        """
+        Resolve Python's library directory for building extensions
+        that rely on a shared Python library.
+
+        See python/cpython#44264 and python/cpython#48686
+        """
+        if not sysconfig.get_config_var('Py_ENABLE_SHARED'):
+            return
+
+        if sysconfig.python_build:
+            yield '.'
+            return
+
+        if sys.platform == 'zos':
+            # On z/OS, a user is not required to install Python to
+            # a predetermined path, but can use Python portably
+            installed_dir = sysconfig.get_config_var('base')
+            lib_dir = sysconfig.get_config_var('platlibdir')
+            yield os.path.join(installed_dir, lib_dir)
+        else:
+            # building third party extensions
+            yield sysconfig.get_config_var('LIBDIR')
+
+    def finalize_options(self) -> None:  # noqa: C901
+        from distutils import sysconfig
+
+        self.set_undefined_options(
+            'build',
+            ('build_lib', 'build_lib'),
+            ('build_temp', 'build_temp'),
+            ('compiler', 'compiler'),
+            ('debug', 'debug'),
+            ('force', 'force'),
+            ('parallel', 'parallel'),
+            ('plat_name', 'plat_name'),
+        )
+
+        if self.package is None:
+            self.package = self.distribution.ext_package
+
+        self.extensions = self.distribution.ext_modules
+
+        # Make sure Python's include directories (for Python.h, pyconfig.h,
+        # etc.) are in the include search path.
+        py_include = sysconfig.get_python_inc()
+        plat_py_include = sysconfig.get_python_inc(plat_specific=True)
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        # If in a virtualenv, add its include directory
+        # Issue 16116
+        if sys.exec_prefix != sys.base_exec_prefix:
+            self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))
+
+        # Put the Python "system" include dir at the end, so that
+        # any local include dirs take precedence.
+        self.include_dirs.extend(py_include.split(os.path.pathsep))
+        if plat_py_include != py_include:
+            self.include_dirs.extend(plat_py_include.split(os.path.pathsep))
+
+        self.ensure_string_list('libraries')
+        self.ensure_string_list('link_objects')
+
+        # Life is easier if we're not forever checking for None, so
+        # simplify these options to empty lists if unset
+        if self.libraries is None:
+            self.libraries = []
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif isinstance(self.library_dirs, str):
+            self.library_dirs = self.library_dirs.split(os.pathsep)
+
+        if self.rpath is None:
+            self.rpath = []
+        elif isinstance(self.rpath, str):
+            self.rpath = self.rpath.split(os.pathsep)
+
+        # For extensions under Windows, use different directories
+        # for Release and Debug builds.
+        # Also, Python's library directory must be appended to library_dirs.
+        if os.name == 'nt' and not is_mingw():
+            # the 'libs' directory is for binary installs - we assume that
+            # must be the *native* platform.  But we don't really support
+            # cross-compiling via a binary install anyway, so we let it go.
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+            if sys.base_exec_prefix != sys.prefix:  # Issue 16116
+                self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
+            if self.debug:
+                self.build_temp = os.path.join(self.build_temp, "Debug")
+            else:
+                self.build_temp = os.path.join(self.build_temp, "Release")
+
+            # Append the source distribution include and library directories;
+            # this allows distutils on Windows to work in the source tree.
+            self.include_dirs.append(os.path.dirname(get_config_h_filename()))
+            self.library_dirs.append(sys.base_exec_prefix)
+
+            # Use the .lib files for the correct architecture
+            if self.plat_name == 'win32':
+                suffix = 'win32'
+            else:
+                # win-amd64
+                suffix = self.plat_name[4:]
+            new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
+            if suffix:
+                new_lib = os.path.join(new_lib, suffix)
+            self.library_dirs.append(new_lib)
+
+        # For extensions under Cygwin, Python's library directory must be
+        # appended to library_dirs
+        if sys.platform[:6] == 'cygwin':
+            if not sysconfig.python_build:
+                # building third party extensions
+                self.library_dirs.append(
+                    os.path.join(
+                        sys.prefix, "lib", "python" + get_python_version(), "config"
+                    )
+                )
+            else:
+                # building python standard extensions
+                self.library_dirs.append('.')
+
+        self.library_dirs.extend(self._python_lib_dir(sysconfig))
+
+        # The argument parsing will result in self.define being a string, but
+        # it has to be a list of 2-tuples.  All the preprocessor symbols
+        # specified by the 'define' option will be set to '1'.  Multiple
+        # symbols can be separated with commas.
+
+        if self.define:
+            defines = self.define.split(',')
+            self.define = [(symbol, '1') for symbol in defines]
+
+        # The option for macros to undefine is also a string from the
+        # option parsing, but has to be a list.  Multiple symbols can also
+        # be separated with commas here.
+        if self.undef:
+            self.undef = self.undef.split(',')
+
+        if self.swig_opts is None:
+            self.swig_opts = []
+        else:
+            self.swig_opts = self.swig_opts.split(' ')
+
+        # Finally add the user include and library directories if requested
+        if self.user:
+            user_include = os.path.join(USER_BASE, "include")
+            user_lib = os.path.join(USER_BASE, "lib")
+            if os.path.isdir(user_include):
+                self.include_dirs.append(user_include)
+            if os.path.isdir(user_lib):
+                self.library_dirs.append(user_lib)
+                self.rpath.append(user_lib)
+
+        if isinstance(self.parallel, str):
+            try:
+                self.parallel = int(self.parallel)
+            except ValueError:
+                raise DistutilsOptionError("parallel should be an integer")
+
+    def run(self) -> None:  # noqa: C901
+        # 'self.extensions', as supplied by setup.py, is a list of
+        # Extension instances.  See the documentation for Extension (in
+        # distutils.extension) for details.
+        #
+        # For backwards compatibility with Distutils 0.8.2 and earlier, we
+        # also allow the 'extensions' list to be a list of tuples:
+        #    (ext_name, build_info)
+        # where build_info is a dictionary containing everything that
+        # Extension instances do except the name, with a few things being
+        # differently named.  We convert these 2-tuples to Extension
+        # instances as needed.
+
+        if not self.extensions:
+            return
+
+        # If we were asked to build any C/C++ libraries, make sure that the
+        # directory where we put them is in the library search path for
+        # linking extensions.
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.libraries.extend(build_clib.get_library_names() or [])
+            self.library_dirs.append(build_clib.build_clib)
+
+        # Setup the CCompiler object that we'll use to do all the
+        # compiling and linking
+        self.compiler = new_compiler(
+            compiler=self.compiler,
+            verbose=self.verbose,
+            dry_run=self.dry_run,
+            force=self.force,
+        )
+        customize_compiler(self.compiler)
+        # If we are cross-compiling, init the compiler now (if we are not
+        # cross-compiling, init would not hurt, but people may rely on
+        # late initialization of compiler even if they shouldn't...)
+        if os.name == 'nt' and self.plat_name != get_platform():
+            self.compiler.initialize(self.plat_name)
+
+        # The official Windows free threaded Python installer doesn't set
+        # Py_GIL_DISABLED because its pyconfig.h is shared with the
+        # default build, so define it here (pypa/setuptools#4662).
+        if os.name == 'nt' and is_freethreaded():
+            self.compiler.define_macro('Py_GIL_DISABLED', '1')
+
+        # And make sure that any compile/link-related options (which might
+        # come from the command-line or from the setup script) are set in
+        # that CCompiler object -- that way, they automatically apply to
+        # all compiling and linking done here.
+        if self.include_dirs is not None:
+            self.compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for name, value in self.define:
+                self.compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                self.compiler.undefine_macro(macro)
+        if self.libraries is not None:
+            self.compiler.set_libraries(self.libraries)
+        if self.library_dirs is not None:
+            self.compiler.set_library_dirs(self.library_dirs)
+        if self.rpath is not None:
+            self.compiler.set_runtime_library_dirs(self.rpath)
+        if self.link_objects is not None:
+            self.compiler.set_link_objects(self.link_objects)
+
+        # Now actually compile and link everything.
+        self.build_extensions()
+
+    def check_extensions_list(self, extensions) -> None:  # noqa: C901
+        """Ensure that the list of extensions (presumably provided as a
+        command option 'extensions') is valid, i.e. it is a list of
+        Extension objects.  We also support the old-style list of 2-tuples,
+        where the tuples are (ext_name, build_info), which are converted to
+        Extension instances here.
+
+        Raise DistutilsSetupError if the structure is invalid anywhere;
+        just returns otherwise.
+        """
+        if not isinstance(extensions, list):
+            raise DistutilsSetupError(
+                "'ext_modules' option must be a list of Extension instances"
+            )
+
+        for i, ext in enumerate(extensions):
+            if isinstance(ext, Extension):
+                continue  # OK! (assume type-checking done
+                # by Extension constructor)
+
+            if not isinstance(ext, tuple) or len(ext) != 2:
+                raise DistutilsSetupError(
+                    "each element of 'ext_modules' option must be an "
+                    "Extension instance or 2-tuple"
+                )
+
+            ext_name, build_info = ext
+
+            log.warning(
+                "old-style (ext_name, build_info) tuple found in "
+                "ext_modules for extension '%s' "
+                "-- please convert to Extension instance",
+                ext_name,
+            )
+
+            if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)):
+                raise DistutilsSetupError(
+                    "first element of each tuple in 'ext_modules' "
+                    "must be the extension name (a string)"
+                )
+
+            if not isinstance(build_info, dict):
+                raise DistutilsSetupError(
+                    "second element of each tuple in 'ext_modules' "
+                    "must be a dictionary (build info)"
+                )
+
+            # OK, the (ext_name, build_info) tuple is type-safe: convert it
+            # to an Extension instance.
+            ext = Extension(ext_name, build_info['sources'])
+
+            # Easy stuff: one-to-one mapping from dict elements to
+            # instance attributes.
+            for key in (
+                'include_dirs',
+                'library_dirs',
+                'libraries',
+                'extra_objects',
+                'extra_compile_args',
+                'extra_link_args',
+            ):
+                val = build_info.get(key)
+                if val is not None:
+                    setattr(ext, key, val)
+
+            # Medium-easy stuff: same syntax/semantics, different names.
+            ext.runtime_library_dirs = build_info.get('rpath')
+            if 'def_file' in build_info:
+                log.warning("'def_file' element of build info dict no longer supported")
+
+            # Non-trivial stuff: 'macros' split into 'define_macros'
+            # and 'undef_macros'.
+            macros = build_info.get('macros')
+            if macros:
+                ext.define_macros = []
+                ext.undef_macros = []
+                for macro in macros:
+                    if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
+                        raise DistutilsSetupError(
+                            "'macros' element of build info dict must be 1- or 2-tuple"
+                        )
+                    if len(macro) == 1:
+                        ext.undef_macros.append(macro[0])
+                    elif len(macro) == 2:
+                        ext.define_macros.append(macro)
+
+            extensions[i] = ext
+
+    def get_source_files(self):
+        self.check_extensions_list(self.extensions)
+        filenames = []
+
+        # Wouldn't it be neat if we knew the names of header files too...
+        for ext in self.extensions:
+            filenames.extend(ext.sources)
+        return filenames
+
+    def get_outputs(self):
+        # Sanity check the 'extensions' list -- can't assume this is being
+        # done in the same run as a 'build_extensions()' call (in fact, we
+        # can probably assume that it *isn't*!).
+        self.check_extensions_list(self.extensions)
+
+        # And build the list of output (built) filenames.  Note that this
+        # ignores the 'inplace' flag, and assumes everything goes in the
+        # "build" tree.
+        return [self.get_ext_fullpath(ext.name) for ext in self.extensions]
+
+    def build_extensions(self) -> None:
+        # First, sanity-check the 'extensions' list
+        self.check_extensions_list(self.extensions)
+        if self.parallel:
+            self._build_extensions_parallel()
+        else:
+            self._build_extensions_serial()
+
+    def _build_extensions_parallel(self):
+        workers = self.parallel
+        if self.parallel is True:
+            workers = os.cpu_count()  # may return None
+        try:
+            from concurrent.futures import ThreadPoolExecutor
+        except ImportError:
+            workers = None
+
+        if workers is None:
+            self._build_extensions_serial()
+            return
+
+        with ThreadPoolExecutor(max_workers=workers) as executor:
+            futures = [
+                executor.submit(self.build_extension, ext) for ext in self.extensions
+            ]
+            for ext, fut in zip(self.extensions, futures):
+                with self._filter_build_errors(ext):
+                    fut.result()
+
+    def _build_extensions_serial(self):
+        for ext in self.extensions:
+            with self._filter_build_errors(ext):
+                self.build_extension(ext)
+
+    @contextlib.contextmanager
+    def _filter_build_errors(self, ext):
+        try:
+            yield
+        except (CCompilerError, DistutilsError, CompileError) as e:
+            if not ext.optional:
+                raise
+            self.warn(f'building extension "{ext.name}" failed: {e}')
+
+    def build_extension(self, ext) -> None:
+        sources = ext.sources
+        if sources is None or not isinstance(sources, (list, tuple)):
+            raise DistutilsSetupError(
+                f"in 'ext_modules' option (extension '{ext.name}'), "
+                "'sources' must be present and must be "
+                "a list of source filenames"
+            )
+        # sort to make the resulting .so file build reproducible
+        sources = sorted(sources)
+
+        ext_path = self.get_ext_fullpath(ext.name)
+        depends = sources + ext.depends
+        if not (self.force or newer_group(depends, ext_path, 'newer')):
+            log.debug("skipping '%s' extension (up-to-date)", ext.name)
+            return
+        else:
+            log.info("building '%s' extension", ext.name)
+
+        # First, scan the sources for SWIG definition files (.i), run
+        # SWIG on 'em to create .c files, and modify the sources list
+        # accordingly.
+        sources = self.swig_sources(sources, ext)
+
+        # Next, compile the source code to object files.
+
+        # XXX not honouring 'define_macros' or 'undef_macros' -- the
+        # CCompiler API needs to change to accommodate this, and I
+        # want to do one thing at a time!
+
+        # Two possible sources for extra compiler arguments:
+        #   - 'extra_compile_args' in Extension object
+        #   - CFLAGS environment variable (not particularly
+        #     elegant, but people seem to expect it and I
+        #     guess it's useful)
+        # The environment variable should take precedence, and
+        # any sensible compiler will give precedence to later
+        # command line args.  Hence we combine them in order:
+        extra_args = ext.extra_compile_args or []
+
+        macros = ext.define_macros[:]
+        for undef in ext.undef_macros:
+            macros.append((undef,))
+
+        objects = self.compiler.compile(
+            sources,
+            output_dir=self.build_temp,
+            macros=macros,
+            include_dirs=ext.include_dirs,
+            debug=self.debug,
+            extra_postargs=extra_args,
+            depends=ext.depends,
+        )
+
+        # XXX outdated variable, kept here in case third-party code
+        # needs it.
+        self._built_objects = objects[:]
+
+        # Now link the object files together into a "shared object" --
+        # of course, first we have to figure out all the other things
+        # that go into the mix.
+        if ext.extra_objects:
+            objects.extend(ext.extra_objects)
+        extra_args = ext.extra_link_args or []
+
+        # Detect target language, if not provided
+        language = ext.language or self.compiler.detect_language(sources)
+
+        self.compiler.link_shared_object(
+            objects,
+            ext_path,
+            libraries=self.get_libraries(ext),
+            library_dirs=ext.library_dirs,
+            runtime_library_dirs=ext.runtime_library_dirs,
+            extra_postargs=extra_args,
+            export_symbols=self.get_export_symbols(ext),
+            debug=self.debug,
+            build_temp=self.build_temp,
+            target_lang=language,
+        )
+
+    def swig_sources(self, sources, extension):
+        """Walk the list of source files in 'sources', looking for SWIG
+        interface (.i) files.  Run SWIG on all that are found, and
+        return a modified 'sources' list with SWIG source files replaced
+        by the generated C (or C++) files.
+        """
+        new_sources = []
+        swig_sources = []
+        swig_targets = {}
+
+        # XXX this drops generated C/C++ files into the source tree, which
+        # is fine for developers who want to distribute the generated
+        # source -- but there should be an option to put SWIG output in
+        # the temp dir.
+
+        if self.swig_cpp:
+            log.warning("--swig-cpp is deprecated - use --swig-opts=-c++")
+
+        if (
+            self.swig_cpp
+            or ('-c++' in self.swig_opts)
+            or ('-c++' in extension.swig_opts)
+        ):
+            target_ext = '.cpp'
+        else:
+            target_ext = '.c'
+
+        for source in sources:
+            (base, ext) = os.path.splitext(source)
+            if ext == ".i":  # SWIG interface file
+                new_sources.append(base + '_wrap' + target_ext)
+                swig_sources.append(source)
+                swig_targets[source] = new_sources[-1]
+            else:
+                new_sources.append(source)
+
+        if not swig_sources:
+            return new_sources
+
+        swig = self.swig or self.find_swig()
+        swig_cmd = [swig, "-python"]
+        swig_cmd.extend(self.swig_opts)
+        if self.swig_cpp:
+            swig_cmd.append("-c++")
+
+        # Do not override commandline arguments
+        if not self.swig_opts:
+            swig_cmd.extend(extension.swig_opts)
+
+        for source in swig_sources:
+            target = swig_targets[source]
+            log.info("swigging %s to %s", source, target)
+            self.spawn(swig_cmd + ["-o", target, source])
+
+        return new_sources
+
+    def find_swig(self):
+        """Return the name of the SWIG executable.  On Unix, this is
+        just "swig" -- it should be in the PATH.  Tries a bit harder on
+        Windows.
+        """
+        if os.name == "posix":
+            return "swig"
+        elif os.name == "nt":
+            # Look for SWIG in its standard installation directory on
+            # Windows (or so I presume!).  If we find it there, great;
+            # if not, act like Unix and assume it's in the PATH.
+            for vers in ("1.3", "1.2", "1.1"):
+                fn = os.path.join(f"c:\\swig{vers}", "swig.exe")
+                if os.path.isfile(fn):
+                    return fn
+            else:
+                return "swig.exe"
+        else:
+            raise DistutilsPlatformError(
+                f"I don't know how to find (much less run) SWIG on platform '{os.name}'"
+            )
+
+    # -- Name generators -----------------------------------------------
+    # (extension names, filenames, whatever)
+    def get_ext_fullpath(self, ext_name: str) -> str:
+        """Returns the path of the filename for a given extension.
+
+        The file is located in `build_lib` or directly in the package
+        (inplace option).
+        """
+        fullname = self.get_ext_fullname(ext_name)
+        modpath = fullname.split('.')
+        filename = self.get_ext_filename(modpath[-1])
+
+        if not self.inplace:
+            # no further work needed
+            # returning :
+            #   build_dir/package/path/filename
+            filename = os.path.join(*modpath[:-1] + [filename])
+            return os.path.join(self.build_lib, filename)
+
+        # the inplace option requires finding the package directory;
+        # use the build_py command for that
+        package = '.'.join(modpath[0:-1])
+        build_py = self.get_finalized_command('build_py')
+        package_dir = os.path.abspath(build_py.get_package_dir(package))
+
+        # returning
+        #   package_dir/filename
+        return os.path.join(package_dir, filename)
+
+    def get_ext_fullname(self, ext_name: str) -> str:
+        """Returns the fullname of a given extension name.
+
+        Adds the `package.` prefix"""
+        if self.package is None:
+            return ext_name
+        else:
+            return self.package + '.' + ext_name
+
+    def get_ext_filename(self, ext_name: str) -> str:
+        r"""Convert the name of an extension (eg. "foo.bar") into the name
+        of the file from which it will be loaded (eg. "foo/bar.so", or
+        "foo\bar.pyd").
+        """
+        from ..sysconfig import get_config_var
+
+        ext_path = ext_name.split('.')
+        ext_suffix = get_config_var('EXT_SUFFIX')
+        return os.path.join(*ext_path) + ext_suffix
+
+    def get_export_symbols(self, ext: Extension) -> list[str]:
+        """Return the list of symbols that a shared extension has to
+        export.  This either uses 'ext.export_symbols' or, if it's not
+        provided, "PyInit_" + module_name.  Only relevant on Windows, where
+        the .pyd file (DLL) must export the module "PyInit_" function.
+        """
+        name = self._get_module_name_for_symbol(ext)
+        try:
+            # Unicode module name support as defined in PEP-489
+            # https://peps.python.org/pep-0489/#export-hook-name
+            name.encode('ascii')
+        except UnicodeEncodeError:
+            suffix = 'U_' + name.encode('punycode').replace(b'-', b'_').decode('ascii')
+        else:
+            suffix = "_" + name
+
+        initfunc_name = "PyInit" + suffix
+        if initfunc_name not in ext.export_symbols:
+            ext.export_symbols.append(initfunc_name)
+        return ext.export_symbols
+
+    def _get_module_name_for_symbol(self, ext):
+        # Package name should be used for `__init__` modules
+        # https://github.com/python/cpython/issues/80074
+        # https://github.com/pypa/setuptools/issues/4826
+        parts = ext.name.split(".")
+        if parts[-1] == "__init__" and len(parts) >= 2:
+            return parts[-2]
+        return parts[-1]
+
+    def get_libraries(self, ext: Extension) -> list[str]:  # noqa: C901
+        """Return the list of libraries to link against when building a
+        shared extension.  On most platforms, this is just 'ext.libraries';
+        on Windows, we add the Python library (eg. python312.dll).
+        """
+        # The python library is always needed on Windows.  For MSVC, this
+        # is redundant, since the library is mentioned in a pragma in
+        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
+        # to need it mentioned explicitly, though, so that's what we do.
+        # Append '_d' to the python import library on debug builds.
+        if sys.platform == "win32" and not is_mingw():
+            from .._msvccompiler import MSVCCompiler
+
+            if not isinstance(self.compiler, MSVCCompiler):
+                template = "python%d%d"
+                if self.debug:
+                    template = template + '_d'
+                pythonlib = template % (
+                    sys.hexversion >> 24,
+                    (sys.hexversion >> 16) & 0xFF,
+                )
+                # don't extend ext.libraries; it may be shared with other
+                # extensions and is a reference to the original list
+                return ext.libraries + [pythonlib]
+        else:
+            # On Android only the main executable and LD_PRELOADs are considered
+            # to be RTLD_GLOBAL, all the dependencies of the main executable
+            # remain RTLD_LOCAL and so the shared libraries must be linked with
+            # libpython when python is built with a shared python library (issue
+            # bpo-21536).
+            # On Cygwin (and if required, other POSIX-like platforms based on
+            # Windows like MinGW) it is simply necessary that all symbols in
+            # shared libraries are resolved at link time.
+            from ..sysconfig import get_config_var
+
+            link_libpython = False
+            if get_config_var('Py_ENABLE_SHARED'):
+                # A native build on an Android device or on Cygwin
+                if hasattr(sys, 'getandroidapilevel'):
+                    link_libpython = True
+                elif sys.platform == 'cygwin' or is_mingw():
+                    link_libpython = True
+                elif '_PYTHON_HOST_PLATFORM' in os.environ:
+                    # We are cross-compiling for one of the relevant platforms
+                    if get_config_var('ANDROID_API_LEVEL') != 0:
+                        link_libpython = True
+                    elif get_config_var('MACHDEP') == 'cygwin':
+                        link_libpython = True
+
+            if link_libpython:
+                ldversion = get_config_var('LDVERSION')
+                return ext.libraries + ['python' + ldversion]
+
+        return ext.libraries
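
A minimal setup-script sketch of how build_ext is typically driven; the module name, source path, and macro below are illustrative assumptions.

    from setuptools import Extension, setup

    setup(
        name='demo',
        version='0.1',
        ext_modules=[
            Extension(
                'demo._speedups',               # dotted module name
                sources=['src/_speedups.c'],    # hypothetical C source
                define_macros=[('USE_FAST_PATH', '1')],
                optional=True,                  # failures become warnings (_filter_build_errors)
            )
        ],
    )

    # Typical invocations handled by run()/build_extensions():
    #   python setup.py build_ext --inplace    (put the .so/.pyd next to the sources)
    #   python setup.py build_ext -j 4         (parallel build via ThreadPoolExecutor)
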
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_py.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_py.py
new file mode 100644
index 00000000..a20b076f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_py.py
@@ -0,0 +1,407 @@
+"""distutils.command.build_py
+
+Implements the Distutils 'build_py' command."""
+
+import glob
+import importlib.util
+import os
+import sys
+from distutils._log import log
+from typing import ClassVar
+
+from ..core import Command
+from ..errors import DistutilsFileError, DistutilsOptionError
+from ..util import convert_path
+
+
+class build_py(Command):
+    description = "\"build\" pure Python modules (copy to build directory)"
+
+    user_options = [
+        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
+        ('compile', 'c', "compile .py to .pyc"),
+        ('no-compile', None, "don't compile .py files [default]"),
+        (
+            'optimize=',
+            'O',
+            "also compile with optimization: -O1 for \"python -O\", "
+            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
+        ),
+        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['compile', 'force']
+    negative_opt: ClassVar[dict[str, str]] = {'no-compile': 'compile'}
+
+    def initialize_options(self):
+        self.build_lib = None
+        self.py_modules = None
+        self.package = None
+        self.package_data = None
+        self.package_dir = None
+        self.compile = False
+        self.optimize = 0
+        self.force = None
+
+    def finalize_options(self) -> None:
+        self.set_undefined_options(
+            'build', ('build_lib', 'build_lib'), ('force', 'force')
+        )
+
+        # Get the distribution options that are aliases for build_py
+        # options -- list of packages and list of modules.
+        self.packages = self.distribution.packages
+        self.py_modules = self.distribution.py_modules
+        self.package_data = self.distribution.package_data
+        self.package_dir = {}
+        if self.distribution.package_dir:
+            for name, path in self.distribution.package_dir.items():
+                self.package_dir[name] = convert_path(path)
+        self.data_files = self.get_data_files()
+
+        # Ick, copied straight from install_lib.py (fancy_getopt needs a
+        # type system!  Hell, *everything* needs a type system!!!)
+        if not isinstance(self.optimize, int):
+            try:
+                self.optimize = int(self.optimize)
+                assert 0 <= self.optimize <= 2
+            except (ValueError, AssertionError):
+                raise DistutilsOptionError("optimize must be 0, 1, or 2")
+
+    def run(self) -> None:
+        # XXX copy_file by default preserves atime and mtime.  IMHO this is
+        # the right thing to do, but perhaps it should be an option -- in
+        # particular, a site administrator might want installed files to
+        # reflect the time of installation rather than the last
+        # modification time before the installed release.
+
+        # XXX copy_file by default preserves mode, which appears to be the
+        # wrong thing to do: if a file is read-only in the working
+        # directory, we want it to be installed read/write so that the next
+        # installation of the same module distribution can overwrite it
+        # without problems.  (This might be a Unix-specific issue.)  Thus
+        # we turn off 'preserve_mode' when copying to the build directory,
+        # since the build directory is supposed to be exactly what the
+        # installation will look like (ie. we preserve mode when
+        # installing).
+
+        # Two options control which modules will be installed: 'packages'
+        # and 'py_modules'.  The former lets us work with whole packages, not
+        # specifying individual modules at all; the latter is for
+        # specifying modules one-at-a-time.
+
+        if self.py_modules:
+            self.build_modules()
+        if self.packages:
+            self.build_packages()
+            self.build_package_data()
+
+        self.byte_compile(self.get_outputs(include_bytecode=False))
+
+    def get_data_files(self):
+        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+        data = []
+        if not self.packages:
+            return data
+        for package in self.packages:
+            # Locate package source directory
+            src_dir = self.get_package_dir(package)
+
+            # Compute package build directory
+            build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+            # Length of path to strip from found files
+            plen = 0
+            if src_dir:
+                plen = len(src_dir) + 1
+
+            # Strip directory from globbed filenames
+            filenames = [file[plen:] for file in self.find_data_files(package, src_dir)]
+            data.append((package, src_dir, build_dir, filenames))
+        return data
+
+    def find_data_files(self, package, src_dir):
+        """Return filenames for package's data files in 'src_dir'"""
+        globs = self.package_data.get('', []) + self.package_data.get(package, [])
+        files = []
+        for pattern in globs:
+            # Each pattern has to be converted to a platform-specific path
+            filelist = glob.glob(
+                os.path.join(glob.escape(src_dir), convert_path(pattern))
+            )
+            # Files that match more than one pattern are only added once
+            files.extend([
+                fn for fn in filelist if fn not in files and os.path.isfile(fn)
+            ])
+        return files
+
+    def build_package_data(self) -> None:
+        """Copy data files into build directory"""
+        for _package, src_dir, build_dir, filenames in self.data_files:
+            for filename in filenames:
+                target = os.path.join(build_dir, filename)
+                self.mkpath(os.path.dirname(target))
+                self.copy_file(
+                    os.path.join(src_dir, filename), target, preserve_mode=False
+                )
+
+    def get_package_dir(self, package):
+        """Return the directory, relative to the top of the source
+        distribution, where package 'package' should be found
+        (at least according to the 'package_dir' option, if any)."""
+        path = package.split('.')
+
+        if not self.package_dir:
+            if path:
+                return os.path.join(*path)
+            else:
+                return ''
+        else:
+            tail = []
+            while path:
+                try:
+                    pdir = self.package_dir['.'.join(path)]
+                except KeyError:
+                    tail.insert(0, path[-1])
+                    del path[-1]
+                else:
+                    tail.insert(0, pdir)
+                    return os.path.join(*tail)
+            else:
+                # Oops, got all the way through 'path' without finding a
+                # match in package_dir.  If package_dir defines a directory
+                # for the root (nameless) package, then fallback on it;
+                # otherwise, we might as well have not consulted
+                # package_dir at all, as we just use the directory implied
+                # by 'tail' (which should be the same as the original value
+                # of 'path' at this point).
+                pdir = self.package_dir.get('')
+                if pdir is not None:
+                    tail.insert(0, pdir)
+
+                if tail:
+                    return os.path.join(*tail)
+                else:
+                    return ''
+
+    def check_package(self, package, package_dir):
+        # Empty dir name means current directory, which we can probably
+        # assume exists.  Also, os.path.exists and isdir don't know about
+        # my "empty string means current dir" convention, so we have to
+        # circumvent them.
+        if package_dir != "":
+            if not os.path.exists(package_dir):
+                raise DistutilsFileError(
+                    f"package directory '{package_dir}' does not exist"
+                )
+            if not os.path.isdir(package_dir):
+                raise DistutilsFileError(
+                    f"supposed package directory '{package_dir}' exists, "
+                    "but is not a directory"
+                )
+
+        # Directories without __init__.py are namespace packages (PEP 420).
+        if package:
+            init_py = os.path.join(package_dir, "__init__.py")
+            if os.path.isfile(init_py):
+                return init_py
+
+        # Either not in a package at all (__init__.py not expected), or
+        # __init__.py doesn't exist -- so don't return the filename.
+        return None
+
+    def check_module(self, module, module_file):
+        if not os.path.isfile(module_file):
+            log.warning("file %s (for module %s) not found", module_file, module)
+            return False
+        else:
+            return True
+
+    def find_package_modules(self, package, package_dir):
+        self.check_package(package, package_dir)
+        module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
+        modules = []
+        setup_script = os.path.abspath(self.distribution.script_name)
+
+        for f in module_files:
+            abs_f = os.path.abspath(f)
+            if abs_f != setup_script:
+                module = os.path.splitext(os.path.basename(f))[0]
+                modules.append((package, module, f))
+            else:
+                self.debug_print(f"excluding {setup_script}")
+        return modules
+
+    def find_modules(self):
+        """Finds individually-specified Python modules, ie. those listed by
+        module name in 'self.py_modules'.  Returns a list of tuples (package,
+        module_base, filename): 'package' is a tuple of the path through
+        package-space to the module; 'module_base' is the bare (no
+        packages, no dots) module name, and 'filename' is the path to the
+        ".py" file (relative to the distribution root) that implements the
+        module.
+        """
+        # Map package names to tuples of useful info about the package:
+        #    (package_dir, checked)
+        # package_dir - the directory where we'll find source files for
+        #   this package
+        # checked - true if we have checked that the package directory
+        #   is valid (exists, contains __init__.py, ... ?)
+        packages = {}
+
+        # List of (package, module, filename) tuples to return
+        modules = []
+
+        # We treat modules-in-packages almost the same as toplevel modules,
+        # just the "package" for a toplevel is empty (either an empty
+        # string or empty list, depending on context).  Differences:
+        #   - don't check for __init__.py in directory for empty package
+        for module in self.py_modules:
+            path = module.split('.')
+            package = '.'.join(path[0:-1])
+            module_base = path[-1]
+
+            try:
+                (package_dir, checked) = packages[package]
+            except KeyError:
+                package_dir = self.get_package_dir(package)
+                checked = False
+
+            if not checked:
+                init_py = self.check_package(package, package_dir)
+                packages[package] = (package_dir, 1)
+                if init_py:
+                    modules.append((package, "__init__", init_py))
+
+            # XXX perhaps we should also check for just .pyc files
+            # (so greedy closed-source bastards can distribute Python
+            # modules too)
+            module_file = os.path.join(package_dir, module_base + ".py")
+            if not self.check_module(module, module_file):
+                continue
+
+            modules.append((package, module_base, module_file))
+
+        return modules
+
+    def find_all_modules(self):
+        """Compute the list of all modules that will be built, whether
+        they are specified one-module-at-a-time ('self.py_modules') or
+        by whole packages ('self.packages').  Return a list of tuples
+        (package, module, module_file), just like 'find_modules()' and
+        'find_package_modules()' do."""
+        modules = []
+        if self.py_modules:
+            modules.extend(self.find_modules())
+        if self.packages:
+            for package in self.packages:
+                package_dir = self.get_package_dir(package)
+                m = self.find_package_modules(package, package_dir)
+                modules.extend(m)
+        return modules
+
+    def get_source_files(self):
+        return [module[-1] for module in self.find_all_modules()]
+
+    def get_module_outfile(self, build_dir, package, module):
+        outfile_path = [build_dir] + list(package) + [module + ".py"]
+        return os.path.join(*outfile_path)
+
+    def get_outputs(self, include_bytecode: bool = True) -> list[str]:
+        modules = self.find_all_modules()
+        outputs = []
+        for package, module, _module_file in modules:
+            package = package.split('.')
+            filename = self.get_module_outfile(self.build_lib, package, module)
+            outputs.append(filename)
+            if include_bytecode:
+                if self.compile:
+                    outputs.append(
+                        importlib.util.cache_from_source(filename, optimization='')
+                    )
+                if self.optimize > 0:
+                    outputs.append(
+                        importlib.util.cache_from_source(
+                            filename, optimization=self.optimize
+                        )
+                    )
+
+        outputs += [
+            os.path.join(build_dir, filename)
+            for package, src_dir, build_dir, filenames in self.data_files
+            for filename in filenames
+        ]
+
+        return outputs
+
+    def build_module(self, module, module_file, package):
+        if isinstance(package, str):
+            package = package.split('.')
+        elif not isinstance(package, (list, tuple)):
+            raise TypeError(
+                "'package' must be a string (dot-separated), list, or tuple"
+            )
+
+        # Now put the module source file into the "build" area -- this is
+        # easy, we just copy it somewhere under self.build_lib (the build
+        # directory for Python source).
+        outfile = self.get_module_outfile(self.build_lib, package, module)
+        dir = os.path.dirname(outfile)
+        self.mkpath(dir)
+        return self.copy_file(module_file, outfile, preserve_mode=False)
+
+    def build_modules(self) -> None:
+        modules = self.find_modules()
+        for package, module, module_file in modules:
+            # Now "build" the module -- ie. copy the source file to
+            # self.build_lib (the build directory for Python source).
+            # (Actually, it gets copied to the directory for this package
+            # under self.build_lib.)
+            self.build_module(module, module_file, package)
+
+    def build_packages(self) -> None:
+        for package in self.packages:
+            # Get list of (package, module, module_file) tuples based on
+            # scanning the package directory.  'package' is only included
+            # in the tuple so that 'find_modules()' and
+            # 'find_package_modules()' have a consistent interface; it's
+            # ignored here (apart from a sanity check).  Also, 'module' is
+            # the *unqualified* module name (ie. no dots, no package -- we
+            # already know its package!), and 'module_file' is the path to
+            # the .py file, relative to the current directory
+            # (ie. including 'package_dir').
+            package_dir = self.get_package_dir(package)
+            modules = self.find_package_modules(package, package_dir)
+
+            # Now loop over the modules we found, "building" each one (just
+            # copy it to self.build_lib).
+            for package_, module, module_file in modules:
+                assert package == package_
+                self.build_module(module, module_file, package)
+
+    def byte_compile(self, files) -> None:
+        if sys.dont_write_bytecode:
+            self.warn('byte-compiling is disabled, skipping.')
+            return
+
+        from ..util import byte_compile
+
+        prefix = self.build_lib
+        if prefix[-1] != os.sep:
+            prefix = prefix + os.sep
+
+        # XXX this code is essentially the same as the 'byte_compile()'
+        # method of the "install_lib" command, except for the determination
+        # of the 'prefix' string.  Hmmm.
+        if self.compile:
+            byte_compile(
+                files, optimize=0, force=self.force, prefix=prefix, dry_run=self.dry_run
+            )
+        if self.optimize > 0:
+            byte_compile(
+                files,
+                optimize=self.optimize,
+                force=self.force,
+                prefix=prefix,
+                dry_run=self.dry_run,
+            )
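
A minimal sketch of the distribution options that build_py consumes; the package layout is an assumption for illustration.

    from setuptools import setup

    setup(
        name='demo',
        version='0.1',
        package_dir={'': 'src'},                      # root package lives under src/
        packages=['demo', 'demo.sub'],                # copied module by module
        py_modules=['standalone'],                    # a single top-level module
        package_data={'demo': ['templates/*.tmpl']},  # globbed by find_data_files()
    )

    # python setup.py build_py --compile   also byte-compiles the outputs to .pyc
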
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_scripts.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_scripts.py
new file mode 100644
index 00000000..127c51d8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/build_scripts.py
@@ -0,0 +1,160 @@
+"""distutils.command.build_scripts
+
+Implements the Distutils 'build_scripts' command."""
+
+import os
+import re
+import tokenize
+from distutils._log import log
+from stat import ST_MODE
+from typing import ClassVar
+
+from .._modified import newer
+from ..core import Command
+from ..util import convert_path
+
+shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
+"""
+Pattern matching a Python interpreter indicated in first line of a script.
+"""
+
+# for Setuptools compatibility
+first_line_re = shebang_pattern
+
+
+class build_scripts(Command):
+    description = "\"build\" scripts (copy and fixup #! line)"
+
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
+        ('build-dir=', 'd', "directory to \"build\" (copy) to"),
+        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+        ('executable=', 'e', "specify final destination interpreter path"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['force']
+
+    def initialize_options(self):
+        self.build_dir = None
+        self.scripts = None
+        self.force = None
+        self.executable = None
+
+    def finalize_options(self):
+        self.set_undefined_options(
+            'build',
+            ('build_scripts', 'build_dir'),
+            ('force', 'force'),
+            ('executable', 'executable'),
+        )
+        self.scripts = self.distribution.scripts
+
+    def get_source_files(self):
+        return self.scripts
+
+    def run(self):
+        if not self.scripts:
+            return
+        self.copy_scripts()
+
+    def copy_scripts(self):
+        """
+        Copy each script listed in ``self.scripts``.
+
+        If a script is marked as a Python script (first line matches
+        'shebang_pattern', i.e. starts with ``#!`` and contains
+        "python"), then adjust the first line in the copy to refer to
+        the current Python interpreter.
+        """
+        self.mkpath(self.build_dir)
+        outfiles = []
+        updated_files = []
+        for script in self.scripts:
+            self._copy_script(script, outfiles, updated_files)
+
+        self._change_modes(outfiles)
+
+        return outfiles, updated_files
+
+    def _copy_script(self, script, outfiles, updated_files):
+        shebang_match = None
+        script = convert_path(script)
+        outfile = os.path.join(self.build_dir, os.path.basename(script))
+        outfiles.append(outfile)
+
+        if not self.force and not newer(script, outfile):
+            log.debug("not copying %s (up-to-date)", script)
+            return
+
+        # Always open the file, but ignore failures in dry-run mode
+        # in order to attempt to copy directly.
+        try:
+            f = tokenize.open(script)
+        except OSError:
+            if not self.dry_run:
+                raise
+            f = None
+        else:
+            first_line = f.readline()
+            if not first_line:
+                self.warn(f"{script} is an empty file (skipping)")
+                return
+
+            shebang_match = shebang_pattern.match(first_line)
+
+        updated_files.append(outfile)
+        if shebang_match:
+            log.info("copying and adjusting %s -> %s", script, self.build_dir)
+            if not self.dry_run:
+                post_interp = shebang_match.group(1) or ''
+                shebang = f"#!python{post_interp}\n"
+                self._validate_shebang(shebang, f.encoding)
+                with open(outfile, "w", encoding=f.encoding) as outf:
+                    outf.write(shebang)
+                    outf.writelines(f.readlines())
+            if f:
+                f.close()
+        else:
+            if f:
+                f.close()
+            self.copy_file(script, outfile)
+
+    def _change_modes(self, outfiles):
+        if os.name != 'posix':
+            return
+
+        for file in outfiles:
+            self._change_mode(file)
+
+    def _change_mode(self, file):
+        if self.dry_run:
+            log.info("changing mode of %s", file)
+            return
+
+        oldmode = os.stat(file)[ST_MODE] & 0o7777
+        newmode = (oldmode | 0o555) & 0o7777
+        if newmode != oldmode:
+            log.info("changing mode of %s from %o to %o", file, oldmode, newmode)
+            os.chmod(file, newmode)
+
+    @staticmethod
+    def _validate_shebang(shebang, encoding):
+        # The Python parser starts reading a script as UTF-8 until it
+        # encounters a #coding:xxx cookie. Since the shebang has to be
+        # the first line of a file, the cookie cannot appear before it,
+        # so the shebang has to be encodable to UTF-8.
+        try:
+            shebang.encode('utf-8')
+        except UnicodeEncodeError:
+            raise ValueError(f"The shebang ({shebang!r}) is not encodable to utf-8")
+
+        # If the script uses a custom encoding (declared via a
+        # #coding:xxx cookie), the shebang has to be encodable to
+        # that encoding too.
+        try:
+            shebang.encode(encoding)
+        except UnicodeEncodeError:
+            raise ValueError(
+                f"The shebang ({shebang!r}) is not encodable "
+                f"to the script encoding ({encoding})"
+            )
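
An illustrative, standalone check of the shebang handling implemented above; the sample script line is an assumption.

    import re

    shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$')

    first_line = '#!/usr/bin/env python3 -u\n'
    match = shebang_pattern.match(first_line)
    assert match is not None
    post_interp = match.group(1) or ''      # ' -u'
    shebang = f"#!python{post_interp}\n"    # what _copy_script() writes: '#!python -u\n'
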
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/check.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/check.py
new file mode 100644
index 00000000..58a823dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/check.py
@@ -0,0 +1,152 @@
+"""distutils.command.check
+
+Implements the Distutils 'check' command.
+"""
+
+import contextlib
+from typing import ClassVar
+
+from ..core import Command
+from ..errors import DistutilsSetupError
+
+with contextlib.suppress(ImportError):
+    import docutils.frontend
+    import docutils.nodes
+    import docutils.parsers.rst
+    import docutils.utils
+
+    class SilentReporter(docutils.utils.Reporter):
+        def __init__(
+            self,
+            source,
+            report_level,
+            halt_level,
+            stream=None,
+            debug=False,
+            encoding='ascii',
+            error_handler='replace',
+        ):
+            self.messages = []
+            super().__init__(
+                source, report_level, halt_level, stream, debug, encoding, error_handler
+            )
+
+        def system_message(self, level, message, *children, **kwargs):
+            self.messages.append((level, message, children, kwargs))
+            return docutils.nodes.system_message(
+                message, *children, level=level, type=self.levels[level], **kwargs
+            )
+
+
+class check(Command):
+    """This command checks the meta-data of the package."""
+
+    description = "perform some checks on the package"
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
+        ('metadata', 'm', 'Verify meta-data'),
+        (
+            'restructuredtext',
+            'r',
+            'Checks whether the long string meta-data is reStructuredText-compliant',
+        ),
+        ('strict', 's', 'Will exit with an error if a check fails'),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['metadata', 'restructuredtext', 'strict']
+
+    def initialize_options(self):
+        """Sets default values for options."""
+        self.restructuredtext = False
+        self.metadata = 1
+        self.strict = False
+        self._warnings = 0
+
+    def finalize_options(self):
+        pass
+
+    def warn(self, msg):
+        """Counts the number of warnings that occur."""
+        self._warnings += 1
+        return Command.warn(self, msg)
+
+    def run(self):
+        """Runs the command."""
+        # perform the various tests
+        if self.metadata:
+            self.check_metadata()
+        if self.restructuredtext:
+            if 'docutils' in globals():
+                try:
+                    self.check_restructuredtext()
+                except TypeError as exc:
+                    raise DistutilsSetupError(str(exc))
+            elif self.strict:
+                raise DistutilsSetupError('The docutils package is needed.')
+
+        # let's raise an error in strict mode, if we have at least
+        # one warning
+        if self.strict and self._warnings > 0:
+            raise DistutilsSetupError('Please correct your package.')
+
+    def check_metadata(self):
+        """Ensures that all required elements of meta-data are supplied.
+
+        Required fields:
+            name, version
+
+        Warns if any are missing.
+        """
+        metadata = self.distribution.metadata
+
+        missing = [
+            attr for attr in ('name', 'version') if not getattr(metadata, attr, None)
+        ]
+
+        if missing:
+            self.warn("missing required meta-data: {}".format(', '.join(missing)))
+
+    def check_restructuredtext(self):
+        """Checks if the long string fields are reST-compliant."""
+        data = self.distribution.get_long_description()
+        for warning in self._check_rst_data(data):
+            line = warning[-1].get('line')
+            if line is None:
+                warning = warning[1]
+            else:
+                warning = f'{warning[1]} (line {line})'
+            self.warn(warning)
+
+    def _check_rst_data(self, data):
+        """Returns warnings when the provided data doesn't compile."""
+        # the include and csv_table directives need this to be a path
+        source_path = self.distribution.script_name or 'setup.py'
+        parser = docutils.parsers.rst.Parser()
+        settings = docutils.frontend.OptionParser(
+            components=(docutils.parsers.rst.Parser,)
+        ).get_default_values()
+        settings.tab_width = 4
+        settings.pep_references = None
+        settings.rfc_references = None
+        reporter = SilentReporter(
+            source_path,
+            settings.report_level,
+            settings.halt_level,
+            stream=settings.warning_stream,
+            debug=settings.debug,
+            encoding=settings.error_encoding,
+            error_handler=settings.error_encoding_error_handler,
+        )
+
+        document = docutils.nodes.document(settings, reporter, source=source_path)
+        document.note_source(source_path, -1)
+        try:
+            parser.parse(data, document)
+        except (AttributeError, TypeError) as e:
+            reporter.messages.append((
+                -1,
+                f'Could not finish the parsing: {e}.',
+                '',
+                {},
+            ))
+
+        return reporter.messages
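The command above is normally driven by `python setup.py check` (optionally with `--restructuredtext` and `--strict`). A minimal sketch of exercising it programmatically, importing the vendored module directly only for illustration; the missing version is what triggers the metadata warning:

    from setuptools._distutils.command.check import check
    from setuptools._distutils.core import Distribution

    dist = Distribution({"name": "example"})   # no version supplied
    cmd = check(dist)                          # Command.__init__ calls initialize_options()
    cmd.restructuredtext = False               # rST checking would require docutils
    cmd.strict = False                         # True would turn warnings into DistutilsSetupError
    cmd.ensure_finalized()
    cmd.run()                                  # warns: missing required meta-data: version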
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/clean.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/clean.py
new file mode 100644
index 00000000..23427aba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/clean.py
@@ -0,0 +1,77 @@
+"""distutils.command.clean
+
+Implements the Distutils 'clean' command."""
+
+# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
+
+import os
+from distutils._log import log
+from typing import ClassVar
+
+from ..core import Command
+from ..dir_util import remove_tree
+
+
+class clean(Command):
+    description = "clean up temporary files from 'build' command"
+    user_options = [
+        ('build-base=', 'b', "base build directory [default: 'build.build-base']"),
+        (
+            'build-lib=',
+            None,
+            "build directory for all modules [default: 'build.build-lib']",
+        ),
+        ('build-temp=', 't', "temporary build directory [default: 'build.build-temp']"),
+        (
+            'build-scripts=',
+            None,
+            "build directory for scripts [default: 'build.build-scripts']",
+        ),
+        ('bdist-base=', None, "temporary directory for built distributions"),
+        ('all', 'a', "remove all build output, not just temporary by-products"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['all']
+
+    def initialize_options(self):
+        self.build_base = None
+        self.build_lib = None
+        self.build_temp = None
+        self.build_scripts = None
+        self.bdist_base = None
+        self.all = None
+
+    def finalize_options(self):
+        self.set_undefined_options(
+            'build',
+            ('build_base', 'build_base'),
+            ('build_lib', 'build_lib'),
+            ('build_scripts', 'build_scripts'),
+            ('build_temp', 'build_temp'),
+        )
+        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
+
+    def run(self):
+        # remove the build/temp.<plat> directory (unless it's already
+        # gone)
+        if os.path.exists(self.build_temp):
+            remove_tree(self.build_temp, dry_run=self.dry_run)
+        else:
+            log.debug("'%s' does not exist -- can't clean it", self.build_temp)
+
+        if self.all:
+            # remove build directories
+            for directory in (self.build_lib, self.bdist_base, self.build_scripts):
+                if os.path.exists(directory):
+                    remove_tree(directory, dry_run=self.dry_run)
+                else:
+                    log.warning("'%s' does not exist -- can't clean it", directory)
+
+        # just for the heck of it, try to remove the base build directory:
+        # we might have emptied it right now, but if not we don't care
+        if not self.dry_run:
+            try:
+                os.rmdir(self.build_base)
+                log.info("removing '%s'", self.build_base)
+            except OSError:
+                pass
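clean normally inherits its directories from the 'build' and 'bdist' commands through set_undefined_options(). A rough sketch of what `python setup.py clean --all` amounts to, with the directories supplied explicitly (the paths mirror the usual build defaults and are illustrative only):

    from setuptools._distutils.command.clean import clean
    from setuptools._distutils.core import Distribution

    cmd = clean(Distribution({"name": "example"}))
    cmd.build_base = 'build'
    cmd.build_lib = 'build/lib'
    cmd.build_temp = 'build/temp'
    cmd.build_scripts = 'build/scripts-3.12'
    cmd.bdist_base = 'build/bdist'
    cmd.all = True    # also remove build_lib, bdist_base and build_scripts
    cmd.run()         # logs which directories were removed or did not exist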
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/config.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/config.py
new file mode 100644
index 00000000..44df8233
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/config.py
@@ -0,0 +1,360 @@
+"""distutils.command.config
+
+Implements the Distutils 'config' command, a (mostly) empty command class
+that exists mainly to be sub-classed by specific module distributions and
+applications.  The idea is that while every "config" command is different,
+at least they're all named the same, and users always see "config" in the
+list of standard commands.  Also, this is a good place to put common
+configure-like tasks: "try to compile this C code", or "figure out where
+this header file lives".
+"""
+
+from __future__ import annotations
+
+import os
+import pathlib
+import re
+from collections.abc import Sequence
+from distutils._log import log
+
+from ..ccompiler import CCompiler, CompileError, LinkError, new_compiler
+from ..core import Command
+from ..errors import DistutilsExecError
+from ..sysconfig import customize_compiler
+
+LANG_EXT = {"c": ".c", "c++": ".cxx"}
+
+
+class config(Command):
+    description = "prepare to build"
+
+    user_options = [
+        ('compiler=', None, "specify the compiler type"),
+        ('cc=', None, "specify the compiler executable"),
+        ('include-dirs=', 'I', "list of directories to search for header files"),
+        ('define=', 'D', "C preprocessor macros to define"),
+        ('undef=', 'U', "C preprocessor macros to undefine"),
+        ('libraries=', 'l', "external C libraries to link with"),
+        ('library-dirs=', 'L', "directories to search for external C libraries"),
+        ('noisy', None, "show every action (compile, link, run, ...) taken"),
+        (
+            'dump-source',
+            None,
+            "dump generated source files before attempting to compile them",
+        ),
+    ]
+
+    # The three standard command methods: since the "config" command
+    # does nothing by default, these are empty.
+
+    def initialize_options(self):
+        self.compiler = None
+        self.cc = None
+        self.include_dirs = None
+        self.libraries = None
+        self.library_dirs = None
+
+        # maximal output for now
+        self.noisy = 1
+        self.dump_source = 1
+
+        # list of temporary files generated along-the-way that we have
+        # to clean at some point
+        self.temp_files = []
+
+    def finalize_options(self):
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        elif isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        if self.libraries is None:
+            self.libraries = []
+        elif isinstance(self.libraries, str):
+            self.libraries = [self.libraries]
+
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif isinstance(self.library_dirs, str):
+            self.library_dirs = self.library_dirs.split(os.pathsep)
+
+    def run(self):
+        pass
+
+    # Utility methods for actual "config" commands.  The interfaces are
+    # loosely based on Autoconf macros of similar names.  Sub-classes
+    # may use these freely.
+
+    def _check_compiler(self):
+        """Check that 'self.compiler' really is a CCompiler object;
+        if not, make it one.
+        """
+        # We do this late, and only on-demand, because this is an expensive
+        # import.
+        if not isinstance(self.compiler, CCompiler):
+            self.compiler = new_compiler(
+                compiler=self.compiler, dry_run=self.dry_run, force=True
+            )
+            customize_compiler(self.compiler)
+            if self.include_dirs:
+                self.compiler.set_include_dirs(self.include_dirs)
+            if self.libraries:
+                self.compiler.set_libraries(self.libraries)
+            if self.library_dirs:
+                self.compiler.set_library_dirs(self.library_dirs)
+
+    def _gen_temp_sourcefile(self, body, headers, lang):
+        filename = "_configtest" + LANG_EXT[lang]
+        with open(filename, "w", encoding='utf-8') as file:
+            if headers:
+                for header in headers:
+                    file.write(f"#include <{header}>\n")
+                file.write("\n")
+            file.write(body)
+            if body[-1] != "\n":
+                file.write("\n")
+        return filename
+
+    def _preprocess(self, body, headers, include_dirs, lang):
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        out = "_configtest.i"
+        self.temp_files.extend([src, out])
+        self.compiler.preprocess(src, out, include_dirs=include_dirs)
+        return (src, out)
+
+    def _compile(self, body, headers, include_dirs, lang):
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        if self.dump_source:
+            dump_file(src, f"compiling '{src}':")
+        (obj,) = self.compiler.object_filenames([src])
+        self.temp_files.extend([src, obj])
+        self.compiler.compile([src], include_dirs=include_dirs)
+        return (src, obj)
+
+    def _link(self, body, headers, include_dirs, libraries, library_dirs, lang):
+        (src, obj) = self._compile(body, headers, include_dirs, lang)
+        prog = os.path.splitext(os.path.basename(src))[0]
+        self.compiler.link_executable(
+            [obj],
+            prog,
+            libraries=libraries,
+            library_dirs=library_dirs,
+            target_lang=lang,
+        )
+
+        if self.compiler.exe_extension is not None:
+            prog = prog + self.compiler.exe_extension
+        self.temp_files.append(prog)
+
+        return (src, obj, prog)
+
+    def _clean(self, *filenames):
+        if not filenames:
+            filenames = self.temp_files
+            self.temp_files = []
+        log.info("removing: %s", ' '.join(filenames))
+        for filename in filenames:
+            try:
+                os.remove(filename)
+            except OSError:
+                pass
+
+    # XXX these ignore the dry-run flag: what to do, what to do? even if
+    # you want a dry-run build, you still need some sort of configuration
+    # info.  My inclination is to make it up to the real config command to
+    # consult 'dry_run', and assume a default (minimal) configuration if
+    # true.  The problem with trying to do it here is that you'd have to
+    # return either true or false from all the 'try' methods, neither of
+    # which is correct.
+
+    # XXX need access to the header search path and maybe default macros.
+
+    def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
+        """Construct a source file from 'body' (a string containing lines
+        of C/C++ code) and 'headers' (a list of header files to include)
+        and run it through the preprocessor.  Return true if the
+        preprocessor succeeded, false if there were any errors.
+        ('body' probably isn't of much use, but what the heck.)
+        """
+        self._check_compiler()
+        ok = True
+        try:
+            self._preprocess(body, headers, include_dirs, lang)
+        except CompileError:
+            ok = False
+
+        self._clean()
+        return ok
+
+    def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang="c"):
+        """Construct a source file (just like 'try_cpp()'), run it through
+        the preprocessor, and return true if any line of the output matches
+        'pattern'.  'pattern' should either be a compiled regex object or a
+        string containing a regex.  If both 'body' and 'headers' are None,
+        preprocesses an empty file -- which can be useful to determine the
+        symbols the preprocessor and compiler set by default.
+        """
+        self._check_compiler()
+        src, out = self._preprocess(body, headers, include_dirs, lang)
+
+        if isinstance(pattern, str):
+            pattern = re.compile(pattern)
+
+        with open(out, encoding='utf-8') as file:
+            match = any(pattern.search(line) for line in file)
+
+        self._clean()
+        return match
+
+    def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
+        """Try to compile a source file built from 'body' and 'headers'.
+        Return true on success, false otherwise.
+        """
+        self._check_compiler()
+        try:
+            self._compile(body, headers, include_dirs, lang)
+            ok = True
+        except CompileError:
+            ok = False
+
+        log.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+    def try_link(
+        self,
+        body,
+        headers=None,
+        include_dirs=None,
+        libraries=None,
+        library_dirs=None,
+        lang="c",
+    ):
+        """Try to compile and link a source file, built from 'body' and
+        'headers', to executable form.  Return true on success, false
+        otherwise.
+        """
+        self._check_compiler()
+        try:
+            self._link(body, headers, include_dirs, libraries, library_dirs, lang)
+            ok = True
+        except (CompileError, LinkError):
+            ok = False
+
+        log.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+    def try_run(
+        self,
+        body,
+        headers=None,
+        include_dirs=None,
+        libraries=None,
+        library_dirs=None,
+        lang="c",
+    ):
+        """Try to compile, link to an executable, and run a program
+        built from 'body' and 'headers'.  Return true on success, false
+        otherwise.
+        """
+        self._check_compiler()
+        try:
+            src, obj, exe = self._link(
+                body, headers, include_dirs, libraries, library_dirs, lang
+            )
+            self.spawn([exe])
+            ok = True
+        except (CompileError, LinkError, DistutilsExecError):
+            ok = False
+
+        log.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+    # -- High-level methods --------------------------------------------
+    # (these are the ones that are actually likely to be useful
+    # when implementing a real-world config command!)
+
+    def check_func(
+        self,
+        func,
+        headers=None,
+        include_dirs=None,
+        libraries=None,
+        library_dirs=None,
+        decl=False,
+        call=False,
+    ):
+        """Determine if function 'func' is available by constructing a
+        source file that refers to 'func', and compiles and links it.
+        If everything succeeds, returns true; otherwise returns false.
+
+        The constructed source file starts out by including the header
+        files listed in 'headers'.  If 'decl' is true, it then declares
+        'func' (as "int func()"); you probably shouldn't supply 'headers'
+        and set 'decl' true in the same call, or you might get errors about
+        a conflicting declarations for 'func'.  Finally, the constructed
+        'main()' function either references 'func' or (if 'call' is true)
+        calls it.  'libraries' and 'library_dirs' are used when
+        linking.
+        """
+        self._check_compiler()
+        body = []
+        if decl:
+            body.append(f"int {func} ();")
+        body.append("int main () {")
+        if call:
+            body.append(f"  {func}();")
+        else:
+            body.append(f"  {func};")
+        body.append("}")
+        body = "\n".join(body) + "\n"
+
+        return self.try_link(body, headers, include_dirs, libraries, library_dirs)
+
+    def check_lib(
+        self,
+        library,
+        library_dirs=None,
+        headers=None,
+        include_dirs=None,
+        other_libraries: Sequence[str] = [],
+    ):
+        """Determine if 'library' is available to be linked against,
+        without actually checking that any particular symbols are provided
+        by it.  'headers' will be used in constructing the source file to
+        be compiled, but the only effect of this is to check if all the
+        header files listed are available.  Any libraries listed in
+        'other_libraries' will be included in the link, in case 'library'
+        has symbols that depend on other libraries.
+        """
+        self._check_compiler()
+        return self.try_link(
+            "int main (void) { }",
+            headers,
+            include_dirs,
+            [library] + list(other_libraries),
+            library_dirs,
+        )
+
+    def check_header(self, header, include_dirs=None, library_dirs=None, lang="c"):
+        """Determine if the system header file named by 'header_file'
+        exists and can be found by the preprocessor; return true if so,
+        false otherwise.
+        """
+        return self.try_cpp(
+            body="/* No body */", headers=[header], include_dirs=include_dirs
+        )
+
+
+def dump_file(filename, head=None):
+    """Dumps a file content into log.info.
+
+    If head is not None, will be dumped before the file content.
+    """
+    if head is None:
+        log.info('%s', filename)
+    else:
+        log.info(head)
+    log.info(pathlib.Path(filename).read_text(encoding='utf-8'))
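Since config does nothing on its own, a project typically subclasses it and combines the try_*/check_* helpers into a configure step. A minimal sketch of such a subclass, assuming a hypothetical zlib probe (the library, header, and function names are examples, not part of this module):

    from setuptools._distutils.command.config import config

    class my_config(config):
        """Hypothetical configure step: probe for zlib before building."""

        def run(self):
            have_header = self.check_header("zlib.h")
            have_lib = self.check_lib("z", headers=["zlib.h"])
            have_func = self.check_func(
                "deflate", headers=["zlib.h"], libraries=["z"], call=True
            )
            if not (have_header and have_lib and have_func):
                self.warn("zlib not found -- compression support will be disabled")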
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install.py
new file mode 100644
index 00000000..dc17e56a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install.py
@@ -0,0 +1,805 @@
+"""distutils.command.install
+
+Implements the Distutils 'install' command."""
+
+from __future__ import annotations
+
+import collections
+import contextlib
+import itertools
+import os
+import sys
+import sysconfig
+from distutils._log import log
+from site import USER_BASE, USER_SITE
+from typing import ClassVar
+
+from ..core import Command
+from ..debug import DEBUG
+from ..errors import DistutilsOptionError, DistutilsPlatformError
+from ..file_util import write_file
+from ..sysconfig import get_config_vars
+from ..util import change_root, convert_path, get_platform, subst_vars
+from . import _framework_compat as fw
+
+HAS_USER_SITE = True
+
+WINDOWS_SCHEME = {
+    'purelib': '{base}/Lib/site-packages',
+    'platlib': '{base}/Lib/site-packages',
+    'headers': '{base}/Include/{dist_name}',
+    'scripts': '{base}/Scripts',
+    'data': '{base}',
+}
+
+INSTALL_SCHEMES = {
+    'posix_prefix': {
+        'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages',
+        'platlib': '{platbase}/{platlibdir}/{implementation_lower}'
+        '{py_version_short}/site-packages',
+        'headers': '{base}/include/{implementation_lower}'
+        '{py_version_short}{abiflags}/{dist_name}',
+        'scripts': '{base}/bin',
+        'data': '{base}',
+    },
+    'posix_home': {
+        'purelib': '{base}/lib/{implementation_lower}',
+        'platlib': '{base}/{platlibdir}/{implementation_lower}',
+        'headers': '{base}/include/{implementation_lower}/{dist_name}',
+        'scripts': '{base}/bin',
+        'data': '{base}',
+    },
+    'nt': WINDOWS_SCHEME,
+    'pypy': {
+        'purelib': '{base}/site-packages',
+        'platlib': '{base}/site-packages',
+        'headers': '{base}/include/{dist_name}',
+        'scripts': '{base}/bin',
+        'data': '{base}',
+    },
+    'pypy_nt': {
+        'purelib': '{base}/site-packages',
+        'platlib': '{base}/site-packages',
+        'headers': '{base}/include/{dist_name}',
+        'scripts': '{base}/Scripts',
+        'data': '{base}',
+    },
+}
+
+# user site schemes
+if HAS_USER_SITE:
+    INSTALL_SCHEMES['nt_user'] = {
+        'purelib': '{usersite}',
+        'platlib': '{usersite}',
+        'headers': '{userbase}/{implementation}{py_version_nodot_plat}'
+        '/Include/{dist_name}',
+        'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts',
+        'data': '{userbase}',
+    }
+
+    INSTALL_SCHEMES['posix_user'] = {
+        'purelib': '{usersite}',
+        'platlib': '{usersite}',
+        'headers': '{userbase}/include/{implementation_lower}'
+        '{py_version_short}{abiflags}/{dist_name}',
+        'scripts': '{userbase}/bin',
+        'data': '{userbase}',
+    }
+
+
+INSTALL_SCHEMES.update(fw.schemes)
+
+
+# The keys to an installation scheme; if any new types of files are to be
+# installed, be sure to add an entry to every installation scheme above,
+# and to SCHEME_KEYS here.
+SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
+
+
+def _load_sysconfig_schemes():
+    with contextlib.suppress(AttributeError):
+        return {
+            scheme: sysconfig.get_paths(scheme, expand=False)
+            for scheme in sysconfig.get_scheme_names()
+        }
+
+
+def _load_schemes():
+    """
+    Extend default schemes with schemes from sysconfig.
+    """
+
+    sysconfig_schemes = _load_sysconfig_schemes() or {}
+
+    return {
+        scheme: {
+            **INSTALL_SCHEMES.get(scheme, {}),
+            **sysconfig_schemes.get(scheme, {}),
+        }
+        for scheme in set(itertools.chain(INSTALL_SCHEMES, sysconfig_schemes))
+    }
+
+
+def _get_implementation():
+    if hasattr(sys, 'pypy_version_info'):
+        return 'PyPy'
+    else:
+        return 'Python'
+
+
+def _select_scheme(ob, name):
+    scheme = _inject_headers(name, _load_scheme(_resolve_scheme(name)))
+    vars(ob).update(_remove_set(ob, _scheme_attrs(scheme)))
+
+
+def _remove_set(ob, attrs):
+    """
+    Include only attrs that are None in ob.
+    """
+    return {key: value for key, value in attrs.items() if getattr(ob, key) is None}
+
+
+def _resolve_scheme(name):
+    os_name, sep, key = name.partition('_')
+    try:
+        resolved = sysconfig.get_preferred_scheme(key)
+    except Exception:
+        resolved = fw.scheme(name)
+    return resolved
+
+
+def _load_scheme(name):
+    return _load_schemes()[name]
+
+
+def _inject_headers(name, scheme):
+    """
+    Given a scheme name and the resolved scheme,
+    if the scheme does not include headers, resolve
+    the fallback scheme for the name and use headers
+    from it. pypa/distutils#88
+    """
+    # Bypass the preferred scheme, which may not
+    # have defined headers.
+    fallback = _load_scheme(name)
+    scheme.setdefault('headers', fallback['headers'])
+    return scheme
+
+
+def _scheme_attrs(scheme):
+    """Resolve install directories by applying the install schemes."""
+    return {f'install_{key}': scheme[key] for key in SCHEME_KEYS}
+
+
+class install(Command):
+    description = "install everything from build directory"
+
+    user_options = [
+        # Select installation scheme and set base director(y|ies)
+        ('prefix=', None, "installation prefix"),
+        ('exec-prefix=', None, "(Unix only) prefix for platform-specific files"),
+        ('home=', None, "(Unix only) home directory to install under"),
+        # Or, just set the base director(y|ies)
+        (
+            'install-base=',
+            None,
+            "base installation directory (instead of --prefix or --home)",
+        ),
+        (
+            'install-platbase=',
+            None,
+            "base installation directory for platform-specific files (instead of --exec-prefix or --home)",
+        ),
+        ('root=', None, "install everything relative to this alternate root directory"),
+        # Or, explicitly set the installation scheme
+        (
+            'install-purelib=',
+            None,
+            "installation directory for pure Python module distributions",
+        ),
+        (
+            'install-platlib=',
+            None,
+            "installation directory for non-pure module distributions",
+        ),
+        (
+            'install-lib=',
+            None,
+            "installation directory for all module distributions (overrides --install-purelib and --install-platlib)",
+        ),
+        ('install-headers=', None, "installation directory for C/C++ headers"),
+        ('install-scripts=', None, "installation directory for Python scripts"),
+        ('install-data=', None, "installation directory for data files"),
+        # Byte-compilation options -- see install_lib.py for details, as
+        # these are duplicated from there (but only install_lib does
+        # anything with them).
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        (
+            'optimize=',
+            'O',
+            "also compile with optimization: -O1 for \"python -O\", "
+            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
+        ),
+        # Miscellaneous control options
+        ('force', 'f', "force installation (overwrite any existing files)"),
+        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
+        # Where to install documentation (eventually!)
+        # ('doc-format=', None, "format of documentation to generate"),
+        # ('install-man=', None, "directory for Unix man pages"),
+        # ('install-html=', None, "directory for HTML documentation"),
+        # ('install-info=', None, "directory for GNU info files"),
+        ('record=', None, "filename in which to record list of installed files"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['compile', 'force', 'skip-build']
+
+    if HAS_USER_SITE:
+        user_options.append((
+            'user',
+            None,
+            f"install in user site-package '{USER_SITE}'",
+        ))
+        boolean_options.append('user')
+
+    negative_opt: ClassVar[dict[str, str]] = {'no-compile': 'compile'}
+
+    def initialize_options(self) -> None:
+        """Initializes options."""
+        # High-level options: these select both an installation base
+        # and scheme.
+        self.prefix: str | None = None
+        self.exec_prefix: str | None = None
+        self.home: str | None = None
+        self.user = False
+
+        # These select only the installation base; it's up to the user to
+        # specify the installation scheme (currently, that means supplying
+        # the --install-{platlib,purelib,scripts,data} options).
+        self.install_base = None
+        self.install_platbase = None
+        self.root: str | None = None
+
+        # These options are the actual installation directories; if not
+        # supplied by the user, they are filled in using the installation
+        # scheme implied by prefix/exec-prefix/home and the contents of
+        # that installation scheme.
+        self.install_purelib = None  # for pure module distributions
+        self.install_platlib = None  # non-pure (dists w/ extensions)
+        self.install_headers = None  # for C/C++ headers
+        self.install_lib: str | None = None  # set to either purelib or platlib
+        self.install_scripts = None
+        self.install_data = None
+        self.install_userbase = USER_BASE
+        self.install_usersite = USER_SITE
+
+        self.compile = None
+        self.optimize = None
+
+        # Deprecated
+        # These two are for putting non-packagized distributions into their
+        # own directory and creating a .pth file if it makes sense.
+        # 'extra_path' comes from the setup file; 'install_path_file' can
+        # be turned off if it makes no sense to install a .pth file.  (But
+        # better to install it uselessly than to guess wrong and not
+        # install it when it's necessary and would be used!)  Currently,
+        # 'install_path_file' is always true unless some outsider meddles
+        # with it.
+        self.extra_path = None
+        self.install_path_file = True
+
+        # 'force' forces installation, even if target files are not
+        # out-of-date.  'skip_build' skips running the "build" command,
+        # handy if you know it's not necessary.  'warn_dir' (which is *not*
+        # a user option, it's just there so the bdist_* commands can turn
+        # it off) determines whether we warn about installing to a
+        # directory not in sys.path.
+        self.force = False
+        self.skip_build = False
+        self.warn_dir = True
+
+        # These are only here as a conduit from the 'build' command to the
+        # 'install_*' commands that do the real work.  ('build_base' isn't
+        # actually used anywhere, but it might be useful in future.)  They
+        # are not user options, because if the user told the install
+        # command where the build directory is, that wouldn't affect the
+        # build command.
+        self.build_base = None
+        self.build_lib = None
+
+        # Not defined yet because we don't know anything about
+        # documentation yet.
+        # self.install_man = None
+        # self.install_html = None
+        # self.install_info = None
+
+        self.record = None
+
+    # -- Option finalizing methods -------------------------------------
+    # (This is rather more involved than for most commands,
+    # because this is where the policy for installing third-
+    # party Python modules on various platforms given a wide
+    # array of user input is decided.  Yes, it's quite complex!)
+
+    def finalize_options(self) -> None:  # noqa: C901
+        """Finalizes options."""
+        # This method (and its helpers, like 'finalize_unix()',
+        # 'finalize_other()', and 'select_scheme()') is where the default
+        # installation directories for modules, extension modules, and
+        # anything else we care to install from a Python module distribution
+        # are determined.  Thus, this code makes a pretty important policy
+        # statement about how third-party stuff is added to a Python
+        # installation!  Note that the actual work of installation is done
+        # by the relatively simple 'install_*' commands; they just take
+        # their orders from the installation directory options determined
+        # here.
+
+        # Check for errors/inconsistencies in the options; first, stuff
+        # that's wrong on any platform.
+
+        if (self.prefix or self.exec_prefix or self.home) and (
+            self.install_base or self.install_platbase
+        ):
+            raise DistutilsOptionError(
+                "must supply either prefix/exec-prefix/home or install-base/install-platbase -- not both"
+            )
+
+        if self.home and (self.prefix or self.exec_prefix):
+            raise DistutilsOptionError(
+                "must supply either home or prefix/exec-prefix -- not both"
+            )
+
+        if self.user and (
+            self.prefix
+            or self.exec_prefix
+            or self.home
+            or self.install_base
+            or self.install_platbase
+        ):
+            raise DistutilsOptionError(
+                "can't combine user with prefix, "
+                "exec_prefix/home, or install_(plat)base"
+            )
+
+        # Next, stuff that's wrong (or dubious) only on certain platforms.
+        if os.name != "posix":
+            if self.exec_prefix:
+                self.warn("exec-prefix option ignored on this platform")
+                self.exec_prefix = None
+
+        # Now the interesting logic -- so interesting that we farm it out
+        # to other methods.  The goal of these methods is to set the final
+        # values for the install_{lib,scripts,data,...}  options, using as
+        # input a heady brew of prefix, exec_prefix, home, install_base,
+        # install_platbase, user-supplied versions of
+        # install_{purelib,platlib,lib,scripts,data,...}, and the
+        # install schemes.  Phew!
+
+        self.dump_dirs("pre-finalize_{unix,other}")
+
+        if os.name == 'posix':
+            self.finalize_unix()
+        else:
+            self.finalize_other()
+
+        self.dump_dirs("post-finalize_{unix,other}()")
+
+        # Expand configuration variables, tilde, etc. in self.install_base
+        # and self.install_platbase -- that way, we can use $base or
+        # $platbase in the other installation directories and not worry
+        # about needing recursive variable expansion (shudder).
+
+        py_version = sys.version.split()[0]
+        (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
+        try:
+            abiflags = sys.abiflags
+        except AttributeError:
+            # sys.abiflags may not be defined on all platforms.
+            abiflags = ''
+        local_vars = {
+            'dist_name': self.distribution.get_name(),
+            'dist_version': self.distribution.get_version(),
+            'dist_fullname': self.distribution.get_fullname(),
+            'py_version': py_version,
+            'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}',
+            'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}',
+            'sys_prefix': prefix,
+            'prefix': prefix,
+            'sys_exec_prefix': exec_prefix,
+            'exec_prefix': exec_prefix,
+            'abiflags': abiflags,
+            'platlibdir': getattr(sys, 'platlibdir', 'lib'),
+            'implementation_lower': _get_implementation().lower(),
+            'implementation': _get_implementation(),
+        }
+
+        # vars for compatibility on older Pythons
+        compat_vars = dict(
+            # Python 3.9 and earlier
+            py_version_nodot_plat=getattr(sys, 'winver', '').replace('.', ''),
+        )
+
+        if HAS_USER_SITE:
+            local_vars['userbase'] = self.install_userbase
+            local_vars['usersite'] = self.install_usersite
+
+        self.config_vars = collections.ChainMap(
+            local_vars,
+            sysconfig.get_config_vars(),
+            compat_vars,
+            fw.vars(),
+        )
+
+        self.expand_basedirs()
+
+        self.dump_dirs("post-expand_basedirs()")
+
+        # Now define config vars for the base directories so we can expand
+        # everything else.
+        local_vars['base'] = self.install_base
+        local_vars['platbase'] = self.install_platbase
+
+        if DEBUG:
+            from pprint import pprint
+
+            print("config vars:")
+            pprint(dict(self.config_vars))
+
+        # Expand "~" and configuration variables in the installation
+        # directories.
+        self.expand_dirs()
+
+        self.dump_dirs("post-expand_dirs()")
+
+        # Create directories in the home dir:
+        if self.user:
+            self.create_home_path()
+
+        # Pick the actual directory to install all modules to: either
+        # install_purelib or install_platlib, depending on whether this
+        # module distribution is pure or not.  Of course, if the user
+        # already specified install_lib, use their selection.
+        if self.install_lib is None:
+            if self.distribution.has_ext_modules():  # has extensions: non-pure
+                self.install_lib = self.install_platlib
+            else:
+                self.install_lib = self.install_purelib
+
+        # Convert directories from Unix /-separated syntax to the local
+        # convention.
+        self.convert_paths(
+            'lib',
+            'purelib',
+            'platlib',
+            'scripts',
+            'data',
+            'headers',
+            'userbase',
+            'usersite',
+        )
+
+        # Deprecated
+        # Well, we're not actually fully completely finalized yet: we still
+        # have to deal with 'extra_path', which is the hack for allowing
+        # non-packagized module distributions (hello, Numerical Python!) to
+        # get their own directories.
+        self.handle_extra_path()
+        self.install_libbase = self.install_lib  # needed for .pth file
+        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
+
+        # If a new root directory was supplied, make all the installation
+        # dirs relative to it.
+        if self.root is not None:
+            self.change_roots(
+                'libbase', 'lib', 'purelib', 'platlib', 'scripts', 'data', 'headers'
+            )
+
+        self.dump_dirs("after prepending root")
+
+        # Find out the build directories, ie. where to install from.
+        self.set_undefined_options(
+            'build', ('build_base', 'build_base'), ('build_lib', 'build_lib')
+        )
+
+        # Punt on doc directories for now -- after all, we're punting on
+        # documentation completely!
+
+    def dump_dirs(self, msg) -> None:
+        """Dumps the list of user options."""
+        if not DEBUG:
+            return
+        from ..fancy_getopt import longopt_xlate
+
+        log.debug(msg + ":")
+        for opt in self.user_options:
+            opt_name = opt[0]
+            if opt_name[-1] == "=":
+                opt_name = opt_name[0:-1]
+            if opt_name in self.negative_opt:
+                opt_name = self.negative_opt[opt_name]
+                opt_name = opt_name.translate(longopt_xlate)
+                val = not getattr(self, opt_name)
+            else:
+                opt_name = opt_name.translate(longopt_xlate)
+                val = getattr(self, opt_name)
+            log.debug("  %s: %s", opt_name, val)
+
+    def finalize_unix(self) -> None:
+        """Finalizes options for posix platforms."""
+        if self.install_base is not None or self.install_platbase is not None:
+            incomplete_scheme = (
+                (
+                    self.install_lib is None
+                    and self.install_purelib is None
+                    and self.install_platlib is None
+                )
+                or self.install_headers is None
+                or self.install_scripts is None
+                or self.install_data is None
+            )
+            if incomplete_scheme:
+                raise DistutilsOptionError(
+                    "install-base or install-platbase supplied, but "
+                    "installation scheme is incomplete"
+                )
+            return
+
+        if self.user:
+            if self.install_userbase is None:
+                raise DistutilsPlatformError("User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme("posix_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("posix_home")
+        else:
+            if self.prefix is None:
+                if self.exec_prefix is not None:
+                    raise DistutilsOptionError(
+                        "must not supply exec-prefix without prefix"
+                    )
+
+                # Allow Fedora to add components to the prefix
+                _prefix_addition = getattr(sysconfig, '_prefix_addition', "")
+
+                self.prefix = os.path.normpath(sys.prefix) + _prefix_addition
+                self.exec_prefix = os.path.normpath(sys.exec_prefix) + _prefix_addition
+
+            else:
+                if self.exec_prefix is None:
+                    self.exec_prefix = self.prefix
+
+            self.install_base = self.prefix
+            self.install_platbase = self.exec_prefix
+            self.select_scheme("posix_prefix")
+
+    def finalize_other(self) -> None:
+        """Finalizes options for non-posix platforms"""
+        if self.user:
+            if self.install_userbase is None:
+                raise DistutilsPlatformError("User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme(os.name + "_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("posix_home")
+        else:
+            if self.prefix is None:
+                self.prefix = os.path.normpath(sys.prefix)
+
+            self.install_base = self.install_platbase = self.prefix
+            try:
+                self.select_scheme(os.name)
+            except KeyError:
+                raise DistutilsPlatformError(
+                    f"I don't know how to install stuff on '{os.name}'"
+                )
+
+    def select_scheme(self, name) -> None:
+        _select_scheme(self, name)
+
+    def _expand_attrs(self, attrs):
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name in ('posix', 'nt'):
+                    val = os.path.expanduser(val)
+                val = subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+    def expand_basedirs(self) -> None:
+        """Calls `os.path.expanduser` on install_base, install_platbase and
+        root."""
+        self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+    def expand_dirs(self) -> None:
+        """Calls `os.path.expanduser` on install dirs."""
+        self._expand_attrs([
+            'install_purelib',
+            'install_platlib',
+            'install_lib',
+            'install_headers',
+            'install_scripts',
+            'install_data',
+        ])
+
+    def convert_paths(self, *names) -> None:
+        """Call `convert_path` over `names`."""
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, convert_path(getattr(self, attr)))
+
+    def handle_extra_path(self) -> None:
+        """Set `path_file` and `extra_dirs` using `extra_path`."""
+        if self.extra_path is None:
+            self.extra_path = self.distribution.extra_path
+
+        if self.extra_path is not None:
+            log.warning(
+                "Distribution option extra_path is deprecated. "
+                "See issue27919 for details."
+            )
+            if isinstance(self.extra_path, str):
+                self.extra_path = self.extra_path.split(',')
+
+            if len(self.extra_path) == 1:
+                path_file = extra_dirs = self.extra_path[0]
+            elif len(self.extra_path) == 2:
+                path_file, extra_dirs = self.extra_path
+            else:
+                raise DistutilsOptionError(
+                    "'extra_path' option must be a list, tuple, or "
+                    "comma-separated string with 1 or 2 elements"
+                )
+
+            # convert to local form in case Unix notation used (as it
+            # should be in setup scripts)
+            extra_dirs = convert_path(extra_dirs)
+        else:
+            path_file = None
+            extra_dirs = ''
+
+        # XXX should we warn if path_file and not extra_dirs? (in which
+        # case the path file would be harmless but pointless)
+        self.path_file = path_file
+        self.extra_dirs = extra_dirs
+
+    def change_roots(self, *names) -> None:
+        """Change the install directories pointed by name using root."""
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, change_root(self.root, getattr(self, attr)))
+
+    def create_home_path(self) -> None:
+        """Create directories under ~."""
+        if not self.user:
+            return
+        home = convert_path(os.path.expanduser("~"))
+        for path in self.config_vars.values():
+            if str(path).startswith(home) and not os.path.isdir(path):
+                self.debug_print(f"os.makedirs('{path}', 0o700)")
+                os.makedirs(path, 0o700)
+
+    # -- Command execution methods -------------------------------------
+
+    def run(self):
+        """Runs the command."""
+        # Obviously have to build before we can install
+        if not self.skip_build:
+            self.run_command('build')
+            # If we built for any other platform, we can't install.
+            build_plat = self.distribution.get_command_obj('build').plat_name
+            # check warn_dir - it is a clue that the 'install' is happening
+            # internally, and not to sys.path, so we don't check the platform
+            # matches what we are running.
+            if self.warn_dir and build_plat != get_platform():
+                raise DistutilsPlatformError("Can't install when cross-compiling")
+
+        # Run all sub-commands (at least those that need to be run)
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        if self.path_file:
+            self.create_path_file()
+
+        # write list of installed files, if requested.
+        if self.record:
+            outputs = self.get_outputs()
+            if self.root:  # strip any package prefix
+                root_len = len(self.root)
+                for counter in range(len(outputs)):
+                    outputs[counter] = outputs[counter][root_len:]
+            self.execute(
+                write_file,
+                (self.record, outputs),
+                f"writing list of installed files to '{self.record}'",
+            )
+
+        sys_path = map(os.path.normpath, sys.path)
+        sys_path = map(os.path.normcase, sys_path)
+        install_lib = os.path.normcase(os.path.normpath(self.install_lib))
+        if (
+            self.warn_dir
+            and not (self.path_file and self.install_path_file)
+            and install_lib not in sys_path
+        ):
+            log.debug(
+                (
+                    "modules installed to '%s', which is not in "
+                    "Python's module search path (sys.path) -- "
+                    "you'll have to change the search path yourself"
+                ),
+                self.install_lib,
+            )
+
+    def create_path_file(self):
+        """Creates the .pth file"""
+        filename = os.path.join(self.install_libbase, self.path_file + ".pth")
+        if self.install_path_file:
+            self.execute(
+                write_file, (filename, [self.extra_dirs]), f"creating {filename}"
+            )
+        else:
+            self.warn(f"path file '{filename}' not created")
+
+    # -- Reporting methods ---------------------------------------------
+
+    def get_outputs(self):
+        """Assembles the outputs of all the sub-commands."""
+        outputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            # Add the contents of cmd.get_outputs(), ensuring
+            # that outputs doesn't contain duplicate entries
+            for filename in cmd.get_outputs():
+                if filename not in outputs:
+                    outputs.append(filename)
+
+        if self.path_file and self.install_path_file:
+            outputs.append(os.path.join(self.install_libbase, self.path_file + ".pth"))
+
+        return outputs
+
+    def get_inputs(self):
+        """Returns the inputs of all the sub-commands"""
+        # XXX gee, this looks familiar ;-(
+        inputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            inputs.extend(cmd.get_inputs())
+
+        return inputs
+
+    # -- Predicates for sub-command list -------------------------------
+
+    def has_lib(self):
+        """Returns true if the current distribution has any Python
+        modules to install."""
+        return (
+            self.distribution.has_pure_modules() or self.distribution.has_ext_modules()
+        )
+
+    def has_headers(self):
+        """Returns true if the current distribution has any headers to
+        install."""
+        return self.distribution.has_headers()
+
+    def has_scripts(self):
+        """Returns true if the current distribution has any scripts to.
+        install."""
+        return self.distribution.has_scripts()
+
+    def has_data(self):
+        """Returns true if the current distribution has any data to.
+        install."""
+        return self.distribution.has_data_files()
+
+    # 'sub_commands': a list of commands this command might have to run to
+    # get its work done.  See cmd.py for more info.
+    sub_commands = [
+        ('install_lib', has_lib),
+        ('install_headers', has_headers),
+        ('install_scripts', has_scripts),
+        ('install_data', has_data),
+        ('install_egg_info', lambda self: True),
+    ]
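The scheme templates above are plain format strings; finalize_options() assembles config_vars and subst_vars() expands each template into a concrete directory. A small illustration with example values (the real values depend on the platform and interpreter):

    from setuptools._distutils.util import subst_vars

    template = '{base}/lib/{implementation_lower}{py_version_short}/site-packages'
    config_vars = {
        'base': '/usr/local',
        'implementation_lower': 'python',
        'py_version_short': '3.12',
    }
    print(subst_vars(template, config_vars))
    # -> /usr/local/lib/python3.12/site-packages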
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_data.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_data.py
new file mode 100644
index 00000000..4ad186e8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_data.py
@@ -0,0 +1,94 @@
+"""distutils.command.install_data
+
+Implements the Distutils 'install_data' command, for installing
+platform-independent data files."""
+
+# contributed by Bastian Kleineidam
+
+from __future__ import annotations
+
+import functools
+import os
+from collections.abc import Iterable
+from typing import ClassVar
+
+from ..core import Command
+from ..util import change_root, convert_path
+
+
+class install_data(Command):
+    description = "install data files"
+
+    user_options = [
+        (
+            'install-dir=',
+            'd',
+            "base directory for installing data files [default: installation base dir]",
+        ),
+        ('root=', None, "install everything relative to this alternate root directory"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['force']
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.outfiles = []
+        self.root = None
+        self.force = False
+        self.data_files = self.distribution.data_files
+        self.warn_dir = True
+
+    def finalize_options(self) -> None:
+        self.set_undefined_options(
+            'install',
+            ('install_data', 'install_dir'),
+            ('root', 'root'),
+            ('force', 'force'),
+        )
+
+    def run(self) -> None:
+        self.mkpath(self.install_dir)
+        for f in self.data_files:
+            self._copy(f)
+
+    @functools.singledispatchmethod
+    def _copy(self, f: tuple[str | os.PathLike, Iterable[str | os.PathLike]]):
+        # it's a tuple with path to install to and a list of files
+        dir = convert_path(f[0])
+        if not os.path.isabs(dir):
+            dir = os.path.join(self.install_dir, dir)
+        elif self.root:
+            dir = change_root(self.root, dir)
+        self.mkpath(dir)
+
+        if f[1] == []:
+            # If there are no files listed, the user must be
+            # trying to create an empty directory, so add the
+            # directory to the list of output files.
+            self.outfiles.append(dir)
+        else:
+            # Copy files, adding them to the list of output files.
+            for data in f[1]:
+                data = convert_path(data)
+                (out, _) = self.copy_file(data, dir)
+                self.outfiles.append(out)
+
+    @_copy.register(str)
+    @_copy.register(os.PathLike)
+    def _(self, f: str | os.PathLike):
+        # it's a simple file, so copy it
+        f = convert_path(f)
+        if self.warn_dir:
+            self.warn(
+                "setup script did not provide a directory for "
+                f"'{f}' -- installing right in '{self.install_dir}'"
+            )
+        (out, _) = self.copy_file(f, self.install_dir)
+        self.outfiles.append(out)
+
+    def get_inputs(self):
+        return self.data_files or []
+
+    def get_outputs(self):
+        return self.outfiles
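The singledispatchmethod above accepts the two shapes allowed in a distribution's data_files argument: a bare file name, or a (target directory, list of files) tuple. An illustrative layout (paths are hypothetical):

    data_files = [
        'README.txt',                              # bare file: copied into install_dir, with a warning
        ('share/example', ['data/defaults.cfg']),  # tuple: files copied under install_dir/share/example
        ('share/example/empty', []),               # empty list: only the directory is created
    ]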
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_egg_info.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_egg_info.py
new file mode 100644
index 00000000..230e94ab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_egg_info.py
@@ -0,0 +1,91 @@
+"""
+distutils.command.install_egg_info
+
+Implements the Distutils 'install_egg_info' command, for installing
+a package's PKG-INFO metadata.
+"""
+
+import os
+import re
+import sys
+from typing import ClassVar
+
+from .. import dir_util
+from .._log import log
+from ..cmd import Command
+
+
+class install_egg_info(Command):
+    """Install an .egg-info file for the package"""
+
+    description = "Install package's PKG-INFO metadata as an .egg-info file"
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+
+    @property
+    def basename(self):
+        """
+        Allow basename to be overridden by child class.
+        Ref pypa/distutils#2.
+        """
+        name = to_filename(safe_name(self.distribution.get_name()))
+        version = to_filename(safe_version(self.distribution.get_version()))
+        return f"{name}-{version}-py{sys.version_info.major}.{sys.version_info.minor}.egg-info"
+
+    def finalize_options(self):
+        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
+        self.target = os.path.join(self.install_dir, self.basename)
+        self.outputs = [self.target]
+
+    def run(self):
+        target = self.target
+        if os.path.isdir(target) and not os.path.islink(target):
+            dir_util.remove_tree(target, dry_run=self.dry_run)
+        elif os.path.exists(target):
+            self.execute(os.unlink, (self.target,), "Removing " + target)
+        elif not os.path.isdir(self.install_dir):
+            self.execute(
+                os.makedirs, (self.install_dir,), "Creating " + self.install_dir
+            )
+        log.info("Writing %s", target)
+        if not self.dry_run:
+            with open(target, 'w', encoding='UTF-8') as f:
+                self.distribution.metadata.write_pkg_file(f)
+
+    def get_outputs(self):
+        return self.outputs
+
+
+# The following routines are taken from setuptools' pkg_resources module and
+# can be replaced by importing them from pkg_resources once it is included
+# in the stdlib.
+
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """Convert an arbitrary string to a standard version string
+
+    Spaces become dots, and all other non-alphanumeric characters become
+    dashes, with runs of multiple dashes condensed to a single dash.
+    """
+    version = version.replace(' ', '.')
+    return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-', '_')
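Taken together, these helpers produce the egg-info basename used by the command above. A short sketch with a hypothetical project name and version:

    import sys
    from setuptools._distutils.command.install_egg_info import (
        safe_name,
        safe_version,
        to_filename,
    )

    name = to_filename(safe_name("My Example.Project"))   # 'My_Example.Project'
    version = to_filename(safe_version("1.0 beta"))        # '1.0.beta'
    print(f"{name}-{version}-py{sys.version_info.major}.{sys.version_info.minor}.egg-info")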
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_headers.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_headers.py
new file mode 100644
index 00000000..97af1371
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_headers.py
@@ -0,0 +1,46 @@
+"""distutils.command.install_headers
+
+Implements the Distutils 'install_headers' command, to install C/C++ header
+files to the Python include directory."""
+
+from typing import ClassVar
+
+from ..core import Command
+
+
+# XXX force is never used
+class install_headers(Command):
+    description = "install C/C++ header files"
+
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
+        ('install-dir=', 'd', "directory to install header files to"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['force']
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.force = False
+        self.outfiles = []
+
+    def finalize_options(self):
+        self.set_undefined_options(
+            'install', ('install_headers', 'install_dir'), ('force', 'force')
+        )
+
+    def run(self):
+        headers = self.distribution.headers
+        if not headers:
+            return
+
+        self.mkpath(self.install_dir)
+        for header in headers:
+            (out, _) = self.copy_file(header, self.install_dir)
+            self.outfiles.append(out)
+
+    def get_inputs(self):
+        return self.distribution.headers or []
+
+    def get_outputs(self):
+        return self.outfiles
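install_headers simply copies whatever the distribution declares in its 'headers' list into the resolved include directory. A minimal sketch of the setup-script side that feeds this command; the header paths are hypothetical:

from distutils.core import setup

setup(
    name='spam',
    version='1.0',
    # Each listed header is copied verbatim by install_headers into the
    # directory taken from the 'install' command's --install-headers option
    # (typically <prefix>/include/pythonX.Y/<dist_name>).
    headers=['include/spam.h', 'include/eggs.h'],
)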
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_lib.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_lib.py
new file mode 100644
index 00000000..2aababf8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_lib.py
@@ -0,0 +1,238 @@
+"""distutils.command.install_lib
+
+Implements the Distutils 'install_lib' command
+(install all Python modules)."""
+
+from __future__ import annotations
+
+import importlib.util
+import os
+import sys
+from typing import Any, ClassVar
+
+from ..core import Command
+from ..errors import DistutilsOptionError
+
+# Extension for Python source files.
+PYTHON_SOURCE_EXTENSION = ".py"
+
+
+class install_lib(Command):
+    description = "install all Python modules (extensions and pure Python)"
+
+    # The byte-compilation options are a tad confusing.  Here are the
+    # possible scenarios:
+    #   1) no compilation at all (--no-compile --no-optimize)
+    #   2) compile .pyc only (--compile --no-optimize; default)
+    #   3) compile .pyc and "opt-1" .pyc (--compile --optimize)
+    #   4) compile "opt-1" .pyc only (--no-compile --optimize)
+    #   5) compile .pyc and "opt-2" .pyc (--compile --optimize-more)
+    #   6) compile "opt-2" .pyc only (--no-compile --optimize-more)
+    #
+    # The UI for this is two options, 'compile' and 'optimize'.
+    # 'compile' is strictly boolean, and only decides whether to
+    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and
+    # decides both whether to generate .pyc files and what level of
+    # optimization to use.
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+        ('build-dir=', 'b', "build directory (where to install from)"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        (
+            'optimize=',
+            'O',
+            "also compile with optimization: -O1 for \"python -O\", "
+            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
+        ),
+        ('skip-build', None, "skip the build steps"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['force', 'compile', 'skip-build']
+    negative_opt: ClassVar[dict[str, str]] = {'no-compile': 'compile'}
+
+    def initialize_options(self):
+        # let the 'install' command dictate our installation directory
+        self.install_dir = None
+        self.build_dir = None
+        self.force = False
+        self.compile = None
+        self.optimize = None
+        self.skip_build = None
+
+    def finalize_options(self) -> None:
+        # Get all the information we need to install pure Python modules
+        # from the umbrella 'install' command -- build (source) directory,
+        # install (target) directory, and whether to compile .py files.
+        self.set_undefined_options(
+            'install',
+            ('build_lib', 'build_dir'),
+            ('install_lib', 'install_dir'),
+            ('force', 'force'),
+            ('compile', 'compile'),
+            ('optimize', 'optimize'),
+            ('skip_build', 'skip_build'),
+        )
+
+        if self.compile is None:
+            self.compile = True
+        if self.optimize is None:
+            self.optimize = False
+
+        if not isinstance(self.optimize, int):
+            try:
+                self.optimize = int(self.optimize)
+            except ValueError:
+                pass
+            if self.optimize not in (0, 1, 2):
+                raise DistutilsOptionError("optimize must be 0, 1, or 2")
+
+    def run(self) -> None:
+        # Make sure we have built everything we need first
+        self.build()
+
+        # Install everything: simply dump the entire contents of the build
+        # directory to the installation directory (that's the beauty of
+        # having a build directory!)
+        outfiles = self.install()
+
+        # (Optionally) compile .py to .pyc
+        if outfiles is not None and self.distribution.has_pure_modules():
+            self.byte_compile(outfiles)
+
+    # -- Top-level worker functions ------------------------------------
+    # (called from 'run()')
+
+    def build(self) -> None:
+        if not self.skip_build:
+            if self.distribution.has_pure_modules():
+                self.run_command('build_py')
+            if self.distribution.has_ext_modules():
+                self.run_command('build_ext')
+
+    # Any: https://typing.readthedocs.io/en/latest/guides/writing_stubs.html#the-any-trick
+    def install(self) -> list[str] | Any:
+        if os.path.isdir(self.build_dir):
+            outfiles = self.copy_tree(self.build_dir, self.install_dir)
+        else:
+            self.warn(
+                f"'{self.build_dir}' does not exist -- no Python modules to install"
+            )
+            return
+        return outfiles
+
+    def byte_compile(self, files) -> None:
+        if sys.dont_write_bytecode:
+            self.warn('byte-compiling is disabled, skipping.')
+            return
+
+        from ..util import byte_compile
+
+        # Get the "--root" directory supplied to the "install" command,
+        # and use it as a prefix to strip off the purported filename
+        # encoded in bytecode files.  This is far from complete, but it
+        # should at least generate usable bytecode in RPM distributions.
+        install_root = self.get_finalized_command('install').root
+
+        if self.compile:
+            byte_compile(
+                files,
+                optimize=0,
+                force=self.force,
+                prefix=install_root,
+                dry_run=self.dry_run,
+            )
+        if self.optimize > 0:
+            byte_compile(
+                files,
+                optimize=self.optimize,
+                force=self.force,
+                prefix=install_root,
+                verbose=self.verbose,
+                dry_run=self.dry_run,
+            )
+
+    # -- Utility methods -----------------------------------------------
+
+    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
+        if not has_any:
+            return []
+
+        build_cmd = self.get_finalized_command(build_cmd)
+        build_files = build_cmd.get_outputs()
+        build_dir = getattr(build_cmd, cmd_option)
+
+        prefix_len = len(build_dir) + len(os.sep)
+        outputs = [os.path.join(output_dir, file[prefix_len:]) for file in build_files]
+
+        return outputs
+
+    def _bytecode_filenames(self, py_filenames):
+        bytecode_files = []
+        for py_file in py_filenames:
+            # Since build_py handles package data installation, the
+            # list of outputs can contain more than just .py files.
+            # Make sure we only report bytecode for the .py files.
+            ext = os.path.splitext(os.path.normcase(py_file))[1]
+            if ext != PYTHON_SOURCE_EXTENSION:
+                continue
+            if self.compile:
+                bytecode_files.append(
+                    importlib.util.cache_from_source(py_file, optimization='')
+                )
+            if self.optimize > 0:
+                bytecode_files.append(
+                    importlib.util.cache_from_source(
+                        py_file, optimization=self.optimize
+                    )
+                )
+
+        return bytecode_files
+
+    # -- External interface --------------------------------------------
+    # (called by outsiders)
+
+    def get_outputs(self):
+        """Return the list of files that would be installed if this command
+        were actually run.  Not affected by the "dry-run" flag or whether
+        modules have actually been built yet.
+        """
+        pure_outputs = self._mutate_outputs(
+            self.distribution.has_pure_modules(),
+            'build_py',
+            'build_lib',
+            self.install_dir,
+        )
+        if self.compile:
+            bytecode_outputs = self._bytecode_filenames(pure_outputs)
+        else:
+            bytecode_outputs = []
+
+        ext_outputs = self._mutate_outputs(
+            self.distribution.has_ext_modules(),
+            'build_ext',
+            'build_lib',
+            self.install_dir,
+        )
+
+        return pure_outputs + bytecode_outputs + ext_outputs
+
+    def get_inputs(self):
+        """Get the list of files that are input to this command, ie. the
+        files that get installed as they are named in the build tree.
+        The files in this list correspond one-to-one to the output
+        filenames returned by 'get_outputs()'.
+        """
+        inputs = []
+
+        if self.distribution.has_pure_modules():
+            build_py = self.get_finalized_command('build_py')
+            inputs.extend(build_py.get_outputs())
+
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            inputs.extend(build_ext.get_outputs())
+
+        return inputs
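_bytecode_filenames predicts, rather than discovers, the cached bytecode names: it asks importlib.util.cache_from_source for the PEP 3147 path at each optimization level that will be compiled. A short illustration with a made-up source path (the 'cpython-312' tag matches this interpreter and will differ on other versions):

import importlib.util

src = 'pkg/mod.py'
print(importlib.util.cache_from_source(src, optimization=''))  # pkg/__pycache__/mod.cpython-312.pyc
print(importlib.util.cache_from_source(src, optimization=1))   # pkg/__pycache__/mod.cpython-312.opt-1.pyc
print(importlib.util.cache_from_source(src, optimization=2))   # pkg/__pycache__/mod.cpython-312.opt-2.pyc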
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_scripts.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_scripts.py
new file mode 100644
index 00000000..92e86941
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/install_scripts.py
@@ -0,0 +1,62 @@
+"""distutils.command.install_scripts
+
+Implements the Distutils 'install_scripts' command, for installing
+Python scripts."""
+
+# contributed by Bastian Kleineidam
+
+import os
+from distutils._log import log
+from stat import ST_MODE
+from typing import ClassVar
+
+from ..core import Command
+
+
+class install_scripts(Command):
+    description = "install scripts (Python or otherwise)"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install scripts to"),
+        ('build-dir=', 'b', "build directory (where to install from)"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ('skip-build', None, "skip the build steps"),
+    ]
+
+    boolean_options: ClassVar[list[str]] = ['force', 'skip-build']
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.force = False
+        self.build_dir = None
+        self.skip_build = None
+
+    def finalize_options(self) -> None:
+        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
+        self.set_undefined_options(
+            'install',
+            ('install_scripts', 'install_dir'),
+            ('force', 'force'),
+            ('skip_build', 'skip_build'),
+        )
+
+    def run(self) -> None:
+        if not self.skip_build:
+            self.run_command('build_scripts')
+        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
+        if os.name == 'posix':
+            # Set the executable bits (owner, group, and world) on
+            # all the scripts we just installed.
+            for file in self.get_outputs():
+                if self.dry_run:
+                    log.info("changing mode of %s", file)
+                else:
+                    mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777
+                    log.info("changing mode of %s to %o", file, mode)
+                    os.chmod(file, mode)
+
+    def get_inputs(self):
+        return self.distribution.scripts or []
+
+    def get_outputs(self):
+        return self.outfiles or []
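The chmod arithmetic in run() adds read and execute permission for owner, group, and other, while masking off the file-type bits that os.stat() includes in st_mode. A tiny worked example with an illustrative starting mode:

mode = 0o644                        # rw-r--r--, a typical freshly copied file
new_mode = (mode | 0o555) & 0o7777  # OR in r-x for everyone, keep permission bits only
print(oct(new_mode))                # 0o755 -> rwxr-xr-x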
diff --git a/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/sdist.py b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/sdist.py
new file mode 100644
index 00000000..b3bf0c32
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/setuptools/_distutils/command/sdist.py
@@ -0,0 +1,521 @@
+"""distutils.command.sdist
+
+Implements the Distutils 'sdist' command (create a source distribution)."""
+
+from __future__ import annotations
+
+import os
+import sys
+from collections.abc import Callable
+from distutils import archive_util, dir_util, file_util
+from distutils._log import log
+from glob import glob
+from itertools import filterfalse
+from typing import ClassVar
+
+from ..core import Command
+from ..errors import DistutilsOptionError, DistutilsTemplateError
+from ..filelist import FileList
+from ..text_file import TextFile
+from ..util import convert_path
+
+
+def show_formats():
+    """Print all possible values for the 'formats' option (used by
+    the "--help-formats" command-line option).
+    """
+    from ..archive_util import ARCHIVE_FORMATS
+    from ..fancy_getopt import FancyGetopt
+
+    formats = sorted(
+        ("formats=" + format, None, ARCHIVE_FORMATS[format][2])
+        for format in ARCHIVE_FORMATS.keys()
+    )
+    FancyGetopt(formats).print_help("List of available source distribution formats:")
+
+
+class sdist(Command):
+    description = "create a source distribution (tarball, zip file, etc.)"
+
+    def checking_metadata(self) -> bool:
+        """Callable used for the check sub-command.
+
+        Placed here so sub_commands can reference it."""
+        return self.metadata_check
+
+    user_options = [
+        ('template=', 't', "name of manifest template file [default: MANIFEST.in]"),
+        ('manifest=', 'm', "name of manifest file [default: MANIFEST]"),
+        (
+            'use-defaults',
+            None,
+            "include the default file set in the manifest "
+            "[default; disable with --no-defaults]",
+        ),
+        ('no-defaults', None, "don't include the default file set"),
+        (
+            'prune',
+            None,
+            "specifically exclude files/directories that should not be "
+            "distributed (build tree, RCS/CVS dirs, etc.) "
+            "[default; disable with --no-prune]",
+        ),
+        ('no-prune', None, "don't automatically exclude anything"),
+        (
+            'manifest-only',
+            'o',
+            "just regenerate the manifest and then stop (implies --force-manifest)",
+        ),
+        (
+            'force-manifest',
+            'f',
+            "forcibly regenerate the manifest and carry on as usual. "
+            "Deprecated: now the manifest is always regenerated.",
+        ),
+        ('formats=', None, "formats for source distribution (comma-separated list)"),
+        (
+            'keep-temp',
+            'k',
+            "keep the distribution tree around after creating " + "archive file(s)",
+        ),
+        (
+            'dist-dir=',
+            'd',
+            "directory to put the source distribution archive(s) in [default: dist]",
+        ),
+        (
+            'metadata-check',
+            None,
+            "Ensure that all required elements of meta-data "
+            "are supplied. Warn if any missing. [default]",
+        ),
+        (
+            'owner=',
+            'u',
+            "Owner name used when creating a tar file [default: current user]",
+        ),
+        (
+            'group=',
+            'g',
+            "Group name used when creating a tar file [default: current group]",
+        ),
+    ]
+
+    boolean_options: ClassVar[list[str]] = [
+        'use-defaults',
+        'prune',
+        'manifest-only',
+        'force-manifest',
+        'keep-temp',
+        'metadata-check',
+    ]
+
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
+        ('help-formats', None, "list available distribution formats", show_formats),
+    ]
+
+    negative_opt: ClassVar[dict[str, str]] = {
+        'no-defaults': 'use-defaults',
+        'no-prune': 'prune',
+    }
+
+    sub_commands = [('check', checking_metadata)]
+
+    READMES: ClassVar[tuple[str, ...]] = ('README', 'README.txt', 'README.rst')
+
+    def initialize_options(self):
+        # 'template' and 'manifest' are, respectively, the names of
+        # the manifest template and manifest file.
+        self.template = None
+        self.manifest = None
+
+        # 'use_defaults': if true, we will include the default file set
+        # in the manifest
+        self.use_defaults = True
+        self.prune = True
+
+        self.manifest_only = False
+        self.force_manifest = False
+
+        self.formats = ['gztar']
+        self.keep_temp = False
+        self.dist_dir = None
+
+        self.archive_files = None
+        self.metadata_check = True
+        self.owner = None
+        self.group = None
+
+    def finalize_options(self) -> None:
+        if self.manifest is None:
+            self.manifest = "MANIFEST"
+        if self.template is None:
+            self.template = "MANIFEST.in"
+
+        self.ensure_string_list('formats')
+
+        bad_format = archive_util.check_archive_formats(self.formats)
+        if bad_format:
+            raise DistutilsOptionError(f"unknown archive format '{bad_format}'")
+
+        if self.dist_dir is None:
+            self.dist_dir = "dist"
+
+    def run(self) -> None:
+        # 'filelist' contains the list of files that will make up the
+        # manifest
+        self.filelist = FileList()
+
+        # Run sub commands
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        # Do whatever it takes to get the list of files to process
+        # (process the manifest template, read an existing manifest,
+        # whatever).  File list is accumulated in 'self.filelist'.
+        self.get_file_list()
+
+        # If user just wanted us to regenerate the manifest, stop now.
+        if self.manifest_only:
+            return
+
+        # Otherwise, go ahead and create the source distribution tarball,
+        # or zipfile, or whatever.
+        self.make_distribution()
+
+    def get_file_list(self) -> None:
+        """Figure out the list of files to include in the source
+        distribution, and put it in 'self.filelist'.  This might involve
+        reading the manifest template (and writing the manifest), or just
+        reading the manifest, or just using the default file set -- it all
+        depends on the user's options.
+        """
+        # new behavior when using a template:
+        # the file list is recalculated every time because
+        # even if MANIFEST.in or setup.py are not changed
+        # the user might have added some files in the tree that
+        # need to be included.
+        #
+        #  This makes --force the default and only behavior with templates.
+        template_exists = os.path.isfile(self.template)
+        if not template_exists and self._manifest_is_not_generated():
+            self.read_manifest()
+            self.filelist.sort()
+            self.filelist.remove_duplicates()
+            return
+
+        if not template_exists:
+            self.warn(
+                f"manifest template '{self.template}' does not exist "
+                "(using default file list)"
+            )
+        self.filelist.findall()
+
+        if self.use_defaults:
+            self.add_defaults()
+
+        if template_exists:
+            self.read_template()
+
+        if self.prune:
+            self.prune_file_list()
+
+        self.filelist.sort()
+        self.filelist.remove_duplicates()
+        self.write_manifest()
+
+    def add_defaults(self) -> None:
+        """Add all the default files to self.filelist:
+          - a README (README, README.txt, or README.rst)
+          - setup.py
+          - tests/test*.py and test/test*.py
+          - all pure Python modules mentioned in setup script
+          - all files pointed by package_data (build_py)
+          - all files defined in data_files.
+          - all files defined as scripts.
+          - all C sources listed as part of extensions or C libraries
+            in the setup script (doesn't catch C headers!)
+        Warns if the README (in any of the forms above) or setup.py is
+        missing; everything else is optional.
+        """
+        self._add_defaults_standards()
+        self._add_defaults_optional()
+        self._add_defaults_python()
+        self._add_defaults_data_files()
+        self._add_defaults_ext()
+        self._add_defaults_c_libs()
+        self._add_defaults_scripts()
+
+    @staticmethod
+    def _cs_path_exists(fspath):
+        """
+        Case-sensitive path existence check
+
+        >>> sdist._cs_path_exists(__file__)
+        True
+        >>> sdist._cs_path_exists(__file__.upper())
+        False
+        """
+        if not os.path.exists(fspath):
+            return False
+        # make absolute so we always have a directory
+        abspath = os.path.abspath(fspath)
+        directory, filename = os.path.split(abspath)
+        return filename in os.listdir(directory)
+
+    def _add_defaults_standards(self):
+        standards = [self.READMES, self.distribution.script_name]
+        for fn in standards:
+            if isinstance(fn, tuple):
+                alts = fn
+                got_it = False
+                for fn in alts:
+                    if self._cs_path_exists(fn):
+                        got_it = True
+                        self.filelist.append(fn)
+                        break
+
+                if not got_it:
+                    self.warn(
+                        "standard file not found: should have one of " + ', '.join(alts)
+                    )
+            else:
+                if self._cs_path_exists(fn):
+                    self.filelist.append(fn)
+                else:
+                    self.warn(f"standard file '{fn}' not found")
+
+    def _add_defaults_optional(self):
+        optional = ['tests/test*.py', 'test/test*.py', 'setup.cfg']
+        for pattern in optional:
+            files = filter(os.path.isfile, glob(pattern))
+            self.filelist.extend(files)
+
+    def _add_defaults_python(self):
+        # build_py is used to get:
+        #  - python modules
+        #  - files defined in package_data
+        build_py = self.get_finalized_command('build_py')
+
+        # getting python files
+        if self.distribution.has_pure_modules():
+            self.filelist.extend(build_py.get_source_files())
+
+        # getting package_data files
+        # (computed in build_py.data_files by build_py.finalize_options)
+        for _pkg, src_dir, _build_dir, filenames in build_py.data_files:
+            for filename in filenames:
+                self.filelist.append(os.path.join(src_dir, filename))
+
+    def _add_defaults_data_files(self):
+        # getting distribution.data_files
+        if self.distribution.has_data_files():
+            for item in self.distribution.data_files:
+                if isinstance(item, str):
+                    # plain file
+                    item = convert_path(item)
+                    if os.path.isfile(item):
+                        self.filelist.append(item)
+                else:
+                    # a (dirname, filenames) tuple
+                    dirname, filenames = item
+                    for f in filenames:
+                        f = convert_path(f)
+                        if os.path.isfile(f):
+                            self.filelist.append(f)
+
+    def _add_defaults_ext(self):
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            self.filelist.extend(build_ext.get_source_files())
+
+    def _add_defaults_c_libs(self):
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.filelist.extend(build_clib.get_source_files())
+
+    def _add_defaults_scripts(self):
+        if self.distribution.has_scripts():
+            build_scripts = self.get_finalized_command('build_scripts')
+            self.filelist.extend(build_scripts.get_source_files())
+
+    def read_template(self) -> None:
+        """Read and parse manifest template file named by self.template.
+
+        (usually "MANIFEST.in") The parsing and processing is done by
+        'self.filelist', which updates itself accordingly.
+        """
+        log.info("reading manifest template '%s'", self.template)
+        template = TextFile(
+            self.template,
+            strip_comments=True,
+            skip_blanks=True,
+            join_lines=True,
+            lstrip_ws=True,
+            rstrip_ws=True,
+            collapse_join=True,
+        )
+
+        try:
+            while True:
+                line = template.readline()
+                if line is None:  # end of file
+                    break
+
+                try:
+                    self.filelist.process_template_line(line)
+                # the call above can raise a DistutilsTemplateError for
+                # malformed lines, or a ValueError from the lower-level
+                # convert_path function
+                except (DistutilsTemplateError, ValueError) as msg:
+                    self.warn(
+                        f"{template.filename}, line {int(template.current_line)}: {msg}"
+                    )
+        finally:
+            template.close()
+
+    def prune_file_list(self) -> None:
+        """Prune off branches that might slip into the file list as created
+        by 'read_template()', but really don't belong there:
+          * the build tree (typically "build")
+          * the release tree itself (only an issue if we ran "sdist"
+            previously with --keep-temp, or it aborted)
+          * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
+        """
+        build = self.get_finalized_command('build')
+        base_dir = self.distribution.get_fullname()
+
+        self.filelist.exclude_pattern(None, prefix=os.fspath(build.build_base))
+        self.filelist.exclude_pattern(None, prefix=base_dir)
+
+        if sys.platform == 'win32':
+            seps = r'/|\\'
+        else:
+            seps = '/'
+
+        vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
+        vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)
+        self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)
+
+    def write_manifest(self) -> None:
+        """Write the file list in 'self.filelist' (presumably as filled in
+        by 'add_defaults()' and 'read_template()') to the manifest file
+        named by 'self.manifest'.
+        """
+        if self._manifest_is_not_generated():
+            log.info(
+                f"not writing to manually maintained manifest file '{self.manifest}'"
+            )
+            return
+
+        content = self.filelist.files[:]
+        content.insert(0, '# file GENERATED by distutils, do NOT edit')
+        self.execute(
+            file_util.write_file,
+            (self.manifest, content),
+            f"writing manifest file '{self.manifest}'",
+        )
+
+    def _manifest_is_not_generated(self):
+        # check for special comment used in 3.1.3 and higher
+        if not os.path.isfile(self.manifest):
+            return False
+
+        with open(self.manifest, encoding='utf-8') as fp:
+            first_line = next(fp)
+        return first_line != '# file GENERATED by distutils, do NOT edit\n'
+
+    def read_manifest(self) -> None:
+        """Read the manifest file (named by 'self.manifest') and use it to
+        fill in 'self.filelist', the list of files to include in the source
+        distribution.
+        """
+        log.info("reading manifest file '%s'", self.manifest)
+        with open(self.manifest, encoding='utf-8') as lines:
+            self.filelist.extend(
+                # ignore comments and blank lines
+                filter(None, filterfalse(is_comment, map(str.strip, lines)))
+            )
+
+    def make_release_tree(self, base_dir, files) -> None:
+        """Create the directory tree that will become the source
+        distribution archive.  All directories implied by the filenames in
+        'files' are created under 'base_dir', and then we hard link or copy
+        (if hard linking is unavailable) those files into place.
+        Essentially, this duplicates the developer's source tree, but in a
+        directory named after the distribution, containing only the files
+        to be distributed.
+        """
+        # Create all the directories under 'base_dir' necessary to
+        # put 'files' there; the 'mkpath()' is just so we don't die
+        # if the manifest happens to be empty.
+        self.mkpath(base_dir)
+        dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
+
+        # And walk over the list of files, either making a hard link (if
+        # os.link exists) to each one that doesn't already exist in its
+        # corresponding location under 'base_dir', or copying each file
+        # that's out-of-date in 'base_dir'.  (Usually, all files will be
+        # out-of-date, because by default we blow away 'base_dir' when
+        # we're done making the distribution archives.)
+
+        if hasattr(os, 'link'):  # can make hard links on this system
+            link = 'hard'
+            msg = f"making hard links in {base_dir}..."
+        else:  # nope, have to copy
+            link = None
+            msg = f"copying files to {base_dir}..."
+
+        if not files:
+            log.warning("no files to distribute -- empty manifest?")
+        else:
+            log.info(msg)
+        for file in files:
+            if not os.path.isfile(file):
+                log.warning("'%s' not a regular file -- skipping", file)
+            else:
+                dest = os.path.join(base_dir, file)
+                self.copy_file(file, dest, link=link)
+
+        self.distribution.metadata.write_pkg_info(base_dir)
+
+    def make_distribution(self) -> None:
+        """Create the source distribution(s).  First, we create the release
+        tree with 'make_release_tree()'; then, we create all required
+        archive files (according to 'self.formats') from the release tree.
+        Finally, we clean up by blowing away the release tree (unless
+        'self.keep_temp' is true).  The list of archive files created is
+        stored so it can be retrieved later by 'get_archive_files()'.
+        """
+        # Don't warn about missing meta-data here -- should be (and is!)
+        # done elsewhere.
+        base_dir = self.distribution.get_fullname()
+        base_name = os.path.join(self.dist_dir, base_dir)
+
+        self.make_release_tree(base_dir, self.filelist.files)
+        archive_files = []  # remember names of files we create
+        # tar archive must be created last to avoid overwrite and remove
+        if 'tar' in self.formats:
+            self.formats.append(self.formats.pop(self.formats.index('tar')))
+
+        for fmt in self.formats:
+            file = self.make_archive(
+                base_name, fmt, base_dir=base_dir, owner=self.owner, group=self.group
+            )
+            archive_files.append(file)
+            self.distribution.dist_files.append(('sdist', '', file))
+
+        self.archive_files = archive_files
+
+        if not self.keep_temp:
+            dir_util.remove_tree(base_dir, dry_run=self.dry_run)
+
+    def get_archive_files(self):
+        """Return the list of archive files created when the command
+        was run, or None if the command hasn't run yet.
+        """
+        return self.archive_files
+
+
+def is_comment(line: str) -> bool:
+    return line.startswith('#')
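prune_file_list() excludes version-control directories with a single regular expression rather than one pattern per tool. A small sketch of how that pattern behaves, evaluated with re.search (as FileList.exclude_pattern does for regex patterns) against a few made-up paths on a POSIX system, where the separator alternation is just '/':

import re

seps = '/'
vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)

for path in ['src/pkg/mod.py', '.git/config', 'docs/.svn/entries', 'legit.git.py']:
    print(path, bool(re.search(vcs_ptrn, path)))
# src/pkg/mod.py False
# .git/config True
# docs/.svn/entries True
# legit.git.py False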