author     S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/sentry_sdk/integrations
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download   gn-ai-master.tar.gz
two versions of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/sentry_sdk/integrations')
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/__init__.py  293
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/_asgi_common.py  108
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/_wsgi_common.py  271
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/aiohttp.py  357
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/anthropic.py  288
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/argv.py  31
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/ariadne.py  161
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/arq.py  246
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/asgi.py  337
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/asyncio.py  144
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/asyncpg.py  208
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/atexit.py  57
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/aws_lambda.py  499
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/beam.py  176
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/boto3.py  137
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/bottle.py  221
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/__init__.py  528
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/beat.py  293
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/utils.py  43
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/chalice.py  134
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/clickhouse_driver.py  157
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/cloud_resource_context.py  280
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/cohere.py  270
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/dedupe.py  51
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/__init__.py  747
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/asgi.py  245
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/caching.py  191
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/middleware.py  187
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/signals_handlers.py  91
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/templates.py  188
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/transactions.py  159
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/views.py  96
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/dramatiq.py  168
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/excepthook.py  83
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/executing.py  67
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/falcon.py  272
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/fastapi.py  147
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/flask.py  275
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/gcp.py  234
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/gnu_backtrace.py  107
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/gql.py  145
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/graphene.py  151
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/__init__.py  151
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/__init__.py  7
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/client.py  94
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/server.py  100
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/client.py  92
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/consts.py  1
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/server.py  66
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/httpx.py  167
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/huey.py  174
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/huggingface_hub.py  175
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/langchain.py  465
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/launchdarkly.py  62
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/litestar.py  306
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/logging.py  298
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/loguru.py  130
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/modules.py  29
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/openai.py  429
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/openfeature.py  39
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/__init__.py  7
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/consts.py  5
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/integration.py  58
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/propagator.py  117
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/span_processor.py  391
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/pure_eval.py  139
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/pymongo.py  214
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/pyramid.py  229
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/quart.py  237
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/ray.py  141
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/__init__.py  38
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/_async_common.py  108
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/_sync_common.py  113
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/consts.py  19
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/__init__.py  0
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/caches.py  121
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/queries.py  68
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/rb.py  32
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis.py  69
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis_cluster.py  99
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py  50
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/utils.py  144
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/rq.py  161
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/rust_tracing.py  284
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/sanic.py  368
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/serverless.py  76
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/socket.py  96
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/__init__.py  4
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/spark_driver.py  315
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/spark_worker.py  116
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/sqlalchemy.py  146
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/starlette.py  740
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/starlite.py  292
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/statsig.py  37
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/stdlib.py  265
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/strawberry.py  393
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/sys_exit.py  70
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/threading.py  121
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/tornado.py  220
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/trytond.py  50
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/typer.py  60
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/unleash.py  34
-rw-r--r--  .venv/lib/python3.12/site-packages/sentry_sdk/integrations/wsgi.py  310
103 files changed, 18185 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/__init__.py
new file mode 100644
index 00000000..9bff2647
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/__init__.py
@@ -0,0 +1,293 @@
+from abc import ABC, abstractmethod
+from threading import Lock
+
+from sentry_sdk.utils import logger
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+    from typing import Callable
+    from typing import Dict
+    from typing import Iterator
+    from typing import List
+    from typing import Optional
+    from typing import Set
+    from typing import Type
+    from typing import Union
+
+
+_DEFAULT_FAILED_REQUEST_STATUS_CODES = frozenset(range(500, 600))
+
+
+_installer_lock = Lock()
+
+# Set of all integration identifiers we have attempted to install
+_processed_integrations = set()  # type: Set[str]
+
+# Set of all integration identifiers we have actually installed
+_installed_integrations = set()  # type: Set[str]
+
+
+def _generate_default_integrations_iterator(
+    integrations,  # type: List[str]
+    auto_enabling_integrations,  # type: List[str]
+):
+    # type: (...) -> Callable[[bool], Iterator[Type[Integration]]]
+
+    def iter_default_integrations(with_auto_enabling_integrations):
+        # type: (bool) -> Iterator[Type[Integration]]
+        """Returns an iterator of the default integration classes:"""
+        from importlib import import_module
+
+        if with_auto_enabling_integrations:
+            all_import_strings = integrations + auto_enabling_integrations
+        else:
+            all_import_strings = integrations
+
+        for import_string in all_import_strings:
+            try:
+                module, cls = import_string.rsplit(".", 1)
+                yield getattr(import_module(module), cls)
+            except (DidNotEnable, SyntaxError) as e:
+                logger.debug(
+                    "Did not import default integration %s: %s", import_string, e
+                )
+
+    if isinstance(iter_default_integrations.__doc__, str):
+        for import_string in integrations:
+            iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)
+
+    return iter_default_integrations
+
+
+_DEFAULT_INTEGRATIONS = [
+    # stdlib/base runtime integrations
+    "sentry_sdk.integrations.argv.ArgvIntegration",
+    "sentry_sdk.integrations.atexit.AtexitIntegration",
+    "sentry_sdk.integrations.dedupe.DedupeIntegration",
+    "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
+    "sentry_sdk.integrations.logging.LoggingIntegration",
+    "sentry_sdk.integrations.modules.ModulesIntegration",
+    "sentry_sdk.integrations.stdlib.StdlibIntegration",
+    "sentry_sdk.integrations.threading.ThreadingIntegration",
+]
+
+_AUTO_ENABLING_INTEGRATIONS = [
+    "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
+    "sentry_sdk.integrations.anthropic.AnthropicIntegration",
+    "sentry_sdk.integrations.ariadne.AriadneIntegration",
+    "sentry_sdk.integrations.arq.ArqIntegration",
+    "sentry_sdk.integrations.asyncpg.AsyncPGIntegration",
+    "sentry_sdk.integrations.boto3.Boto3Integration",
+    "sentry_sdk.integrations.bottle.BottleIntegration",
+    "sentry_sdk.integrations.celery.CeleryIntegration",
+    "sentry_sdk.integrations.chalice.ChaliceIntegration",
+    "sentry_sdk.integrations.clickhouse_driver.ClickhouseDriverIntegration",
+    "sentry_sdk.integrations.cohere.CohereIntegration",
+    "sentry_sdk.integrations.django.DjangoIntegration",
+    "sentry_sdk.integrations.falcon.FalconIntegration",
+    "sentry_sdk.integrations.fastapi.FastApiIntegration",
+    "sentry_sdk.integrations.flask.FlaskIntegration",
+    "sentry_sdk.integrations.gql.GQLIntegration",
+    "sentry_sdk.integrations.graphene.GrapheneIntegration",
+    "sentry_sdk.integrations.httpx.HttpxIntegration",
+    "sentry_sdk.integrations.huey.HueyIntegration",
+    "sentry_sdk.integrations.huggingface_hub.HuggingfaceHubIntegration",
+    "sentry_sdk.integrations.langchain.LangchainIntegration",
+    "sentry_sdk.integrations.litestar.LitestarIntegration",
+    "sentry_sdk.integrations.loguru.LoguruIntegration",
+    "sentry_sdk.integrations.openai.OpenAIIntegration",
+    "sentry_sdk.integrations.pymongo.PyMongoIntegration",
+    "sentry_sdk.integrations.pyramid.PyramidIntegration",
+    "sentry_sdk.integrations.quart.QuartIntegration",
+    "sentry_sdk.integrations.redis.RedisIntegration",
+    "sentry_sdk.integrations.rq.RqIntegration",
+    "sentry_sdk.integrations.sanic.SanicIntegration",
+    "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
+    "sentry_sdk.integrations.starlette.StarletteIntegration",
+    "sentry_sdk.integrations.starlite.StarliteIntegration",
+    "sentry_sdk.integrations.strawberry.StrawberryIntegration",
+    "sentry_sdk.integrations.tornado.TornadoIntegration",
+]
+
+iter_default_integrations = _generate_default_integrations_iterator(
+    integrations=_DEFAULT_INTEGRATIONS,
+    auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
+)
+
+del _generate_default_integrations_iterator
+
+
+_MIN_VERSIONS = {
+    "aiohttp": (3, 4),
+    "anthropic": (0, 16),
+    "ariadne": (0, 20),
+    "arq": (0, 23),
+    "asyncpg": (0, 23),
+    "beam": (2, 12),
+    "boto3": (1, 12),  # botocore
+    "bottle": (0, 12),
+    "celery": (4, 4, 7),
+    "chalice": (1, 16, 0),
+    "clickhouse_driver": (0, 2, 0),
+    "django": (1, 8),
+    "dramatiq": (1, 9),
+    "falcon": (1, 4),
+    "fastapi": (0, 79, 0),
+    "flask": (1, 1, 4),
+    "gql": (3, 4, 1),
+    "graphene": (3, 3),
+    "grpc": (1, 32, 0),  # grpcio
+    "huggingface_hub": (0, 22),
+    "langchain": (0, 0, 210),
+    "launchdarkly": (9, 8, 0),
+    "loguru": (0, 7, 0),
+    "openai": (1, 0, 0),
+    "openfeature": (0, 7, 1),
+    "quart": (0, 16, 0),
+    "ray": (2, 7, 0),
+    "requests": (2, 0, 0),
+    "rq": (0, 6),
+    "sanic": (0, 8),
+    "sqlalchemy": (1, 2),
+    "starlette": (0, 16),
+    "starlite": (1, 48),
+    "statsig": (0, 55, 3),
+    "strawberry": (0, 209, 5),
+    "tornado": (6, 0),
+    "typer": (0, 15),
+    "unleash": (6, 0, 1),
+}
+
+
+def setup_integrations(
+    integrations,
+    with_defaults=True,
+    with_auto_enabling_integrations=False,
+    disabled_integrations=None,
+):
+    # type: (Sequence[Integration], bool, bool, Optional[Sequence[Union[type[Integration], Integration]]]) -> Dict[str, Integration]
+    """
+    Given a list of integration instances, this installs them all.
+
+    When `with_defaults` is set to `True`, all default integrations are added
+    unless they were already provided.
+
+    `disabled_integrations` takes precedence over `with_defaults` and
+    `with_auto_enabling_integrations`.
+    """
+    integrations = dict(
+        (integration.identifier, integration) for integration in integrations or ()
+    )
+
+    logger.debug("Setting up integrations (with default = %s)", with_defaults)
+
+    # Integrations that will not be enabled
+    disabled_integrations = [
+        integration if isinstance(integration, type) else type(integration)
+        for integration in disabled_integrations or []
+    ]
+
+    # Integrations that are not explicitly set up by the user.
+    used_as_default_integration = set()
+
+    if with_defaults:
+        for integration_cls in iter_default_integrations(
+            with_auto_enabling_integrations
+        ):
+            if integration_cls.identifier not in integrations:
+                instance = integration_cls()
+                integrations[instance.identifier] = instance
+                used_as_default_integration.add(instance.identifier)
+
+    for identifier, integration in integrations.items():
+        with _installer_lock:
+            if identifier not in _processed_integrations:
+                if type(integration) in disabled_integrations:
+                    logger.debug("Ignoring integration %s", identifier)
+                else:
+                    logger.debug(
+                        "Setting up previously not enabled integration %s", identifier
+                    )
+                    try:
+                        type(integration).setup_once()
+                    except DidNotEnable as e:
+                        if identifier not in used_as_default_integration:
+                            raise
+
+                        logger.debug(
+                            "Did not enable default integration %s: %s", identifier, e
+                        )
+                    else:
+                        _installed_integrations.add(identifier)
+
+                _processed_integrations.add(identifier)
+
+    integrations = {
+        identifier: integration
+        for identifier, integration in integrations.items()
+        if identifier in _installed_integrations
+    }
+
+    for identifier in integrations:
+        logger.debug("Enabling integration %s", identifier)
+
+    return integrations
+
+
+def _check_minimum_version(integration, version, package=None):
+    # type: (type[Integration], Optional[tuple[int, ...]], Optional[str]) -> None
+    package = package or integration.identifier
+
+    if version is None:
+        raise DidNotEnable(f"Unparsable {package} version.")
+
+    min_version = _MIN_VERSIONS.get(integration.identifier)
+    if min_version is None:
+        return
+
+    if version < min_version:
+        raise DidNotEnable(
+            f"Integration only supports {package} {'.'.join(map(str, min_version))} or newer."
+        )
+
+
+class DidNotEnable(Exception):  # noqa: N818
+    """
+    The integration could not be enabled due to a trivial user error like
+    `flask` not being installed for the `FlaskIntegration`.
+
+    This exception is silently swallowed for default integrations, but reraised
+    for explicitly enabled integrations.
+    """
+
+
+class Integration(ABC):
+    """Baseclass for all integrations.
+
+    To accept options for an integration, implement your own constructor that
+    saves those options on `self`.
+    """
+
+    install = None
+    """Legacy method, do not implement."""
+
+    identifier = None  # type: str
+    """String unique ID of integration type"""
+
+    @staticmethod
+    @abstractmethod
+    def setup_once():
+        # type: () -> None
+        """
+        Initialize the integration.
+
+        This function is only called once, ever. Configuration is not available
+        at this point, so the only thing to do here is to hook into exception
+        handlers, and perhaps do monkeypatches.
+
+        Inside those hooks `Integration.current` can be used to access the
+        instance again.
+        """
+        pass
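
A minimal usage sketch of the API defined in integrations/__init__.py above: a custom
integration subclasses Integration, raises DidNotEnable from setup_once() when its
target library is missing, and is passed to sentry_sdk.init(). The my_lib package and
MyLibIntegration class below are hypothetical; the DSN is a placeholder.

    import sentry_sdk
    from sentry_sdk.integrations import DidNotEnable, Integration


    class MyLibIntegration(Integration):
        identifier = "my_lib"

        @staticmethod
        def setup_once():
            # type: () -> None
            try:
                import my_lib  # hypothetical third-party package
            except ImportError:
                # Swallowed by setup_integrations() for default integrations,
                # re-raised when the integration was passed in explicitly.
                raise DidNotEnable("my_lib is not installed")
            # monkeypatch my_lib here; this runs exactly once per process


    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[MyLibIntegration()],
    )
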
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/_asgi_common.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/_asgi_common.py
new file mode 100644
index 00000000..c16bbbcf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/_asgi_common.py
@@ -0,0 +1,108 @@
+import urllib
+
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+    from typing import Union
+    from typing_extensions import Literal
+
+    from sentry_sdk.utils import AnnotatedValue
+
+
+def _get_headers(asgi_scope):
+    # type: (Any) -> Dict[str, str]
+    """
+    Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
+    """
+    headers = {}  # type: Dict[str, str]
+    for raw_key, raw_value in asgi_scope["headers"]:
+        key = raw_key.decode("latin-1")
+        value = raw_value.decode("latin-1")
+        if key in headers:
+            headers[key] = headers[key] + ", " + value
+        else:
+            headers[key] = value
+
+    return headers
+
+
+def _get_url(asgi_scope, default_scheme, host):
+    # type: (Dict[str, Any], Literal["ws", "http"], Optional[Union[AnnotatedValue, str]]) -> str
+    """
+    Extract URL from the ASGI scope, without also including the querystring.
+    """
+    scheme = asgi_scope.get("scheme", default_scheme)
+
+    server = asgi_scope.get("server", None)
+    path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "")
+
+    if host:
+        return "%s://%s%s" % (scheme, host, path)
+
+    if server is not None:
+        host, port = server
+        default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme)
+        if port != default_port:
+            return "%s://%s:%s%s" % (scheme, host, port, path)
+        return "%s://%s%s" % (scheme, host, path)
+    return path
+
+
+def _get_query(asgi_scope):
+    # type: (Any) -> Any
+    """
+    Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
+    """
+    qs = asgi_scope.get("query_string")
+    if not qs:
+        return None
+    return urllib.parse.unquote(qs.decode("latin-1"))
+
+
+def _get_ip(asgi_scope):
+    # type: (Any) -> str
+    """
+    Extract IP Address from the ASGI scope based on request headers with fallback to scope client.
+    """
+    headers = _get_headers(asgi_scope)
+    try:
+        return headers["x-forwarded-for"].split(",")[0].strip()
+    except (KeyError, IndexError):
+        pass
+
+    try:
+        return headers["x-real-ip"]
+    except KeyError:
+        pass
+
+    return asgi_scope.get("client")[0]
+
+
+def _get_request_data(asgi_scope):
+    # type: (Any) -> Dict[str, Any]
+    """
+    Returns data related to the HTTP request from the ASGI scope.
+    """
+    request_data = {}  # type: Dict[str, Any]
+    ty = asgi_scope["type"]
+    if ty in ("http", "websocket"):
+        request_data["method"] = asgi_scope.get("method")
+
+        request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope))
+        request_data["query_string"] = _get_query(asgi_scope)
+
+        request_data["url"] = _get_url(
+            asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
+        )
+
+    client = asgi_scope.get("client")
+    if client and should_send_default_pii():
+        request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)}
+
+    return request_data
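
The helpers in _asgi_common.py operate on the raw ASGI scope dict: headers arrive as a
list of latin-1 encoded byte pairs, the server as a (host, port) tuple, and the query
string as bytes. A short sketch of what they return for a hand-built scope (all values
below are illustrative, and these are private, underscore-prefixed helpers):

    from sentry_sdk.integrations._asgi_common import _get_headers, _get_query, _get_url

    scope = {
        "type": "http",
        "scheme": "https",
        "method": "GET",
        "server": ("example.com", 443),
        "root_path": "",
        "path": "/items/42",
        "query_string": b"page=2&q=caf%C3%A9",
        "headers": [(b"host", b"example.com"), (b"x-real-ip", b"203.0.113.7")],
    }

    headers = _get_headers(scope)
    assert headers == {"host": "example.com", "x-real-ip": "203.0.113.7"}

    # With a Host header present, the URL is built from scheme + host + path,
    # exactly as _get_request_data() does it.
    assert _get_url(scope, "http", headers.get("host")) == "https://example.com/items/42"

    # The query string is decoded and unquoted for the event payload.
    assert _get_query(scope) == "page=2&q=café"
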
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/_wsgi_common.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/_wsgi_common.py
new file mode 100644
index 00000000..48bc4328
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/_wsgi_common.py
@@ -0,0 +1,271 @@
+from contextlib import contextmanager
+import json
+from copy import deepcopy
+
+import sentry_sdk
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import AnnotatedValue, logger
+
+try:
+    from django.http.request import RawPostDataException
+except ImportError:
+    RawPostDataException = None
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import Iterator
+    from typing import Mapping
+    from typing import MutableMapping
+    from typing import Optional
+    from typing import Union
+    from sentry_sdk._types import Event, HttpStatusCodeRange
+
+
+SENSITIVE_ENV_KEYS = (
+    "REMOTE_ADDR",
+    "HTTP_X_FORWARDED_FOR",
+    "HTTP_SET_COOKIE",
+    "HTTP_COOKIE",
+    "HTTP_AUTHORIZATION",
+    "HTTP_X_API_KEY",
+    "HTTP_X_FORWARDED_FOR",
+    "HTTP_X_REAL_IP",
+)
+
+SENSITIVE_HEADERS = tuple(
+    x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_")
+)
+
+DEFAULT_HTTP_METHODS_TO_CAPTURE = (
+    "CONNECT",
+    "DELETE",
+    "GET",
+    # "HEAD",  # do not capture HEAD requests by default
+    # "OPTIONS",  # do not capture OPTIONS requests by default
+    "PATCH",
+    "POST",
+    "PUT",
+    "TRACE",
+)
+
+
+# This noop context manager can be replaced with "from contextlib import nullcontext" when we drop Python 3.6 support
+@contextmanager
+def nullcontext():
+    # type: () -> Iterator[None]
+    yield
+
+
+def request_body_within_bounds(client, content_length):
+    # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool
+    if client is None:
+        return False
+
+    bodies = client.options["max_request_body_size"]
+    return not (
+        bodies == "never"
+        or (bodies == "small" and content_length > 10**3)
+        or (bodies == "medium" and content_length > 10**4)
+    )
+
+
+class RequestExtractor:
+    """
+    Base class for request extraction.
+    """
+
+    # It does not make sense to make this class an ABC because it is not used
+    # for typing, only so that child classes can inherit common methods from
+    # it. Only some child classes implement all methods that raise
+    # NotImplementedError in this class.
+
+    def __init__(self, request):
+        # type: (Any) -> None
+        self.request = request
+
+    def extract_into_event(self, event):
+        # type: (Event) -> None
+        client = sentry_sdk.get_client()
+        if not client.is_active():
+            return
+
+        data = None  # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]
+
+        content_length = self.content_length()
+        request_info = event.get("request", {})
+
+        if should_send_default_pii():
+            request_info["cookies"] = dict(self.cookies())
+
+        if not request_body_within_bounds(client, content_length):
+            data = AnnotatedValue.removed_because_over_size_limit()
+        else:
+            # First read the raw body data
+            # It is important to read this first because if it is Django
+            # it will cache the body and then we can read the cached version
+            # again in parsed_body() (or json() or wherever).
+            raw_data = None
+            try:
+                raw_data = self.raw_data()
+            except (RawPostDataException, ValueError):
+                # If Django REST framework is used, it has already read the body
+                # for us, so reading it here will fail. We can ignore this.
+                pass
+
+            parsed_body = self.parsed_body()
+            if parsed_body is not None:
+                data = parsed_body
+            elif raw_data:
+                data = AnnotatedValue.removed_because_raw_data()
+            else:
+                data = None
+
+        if data is not None:
+            request_info["data"] = data
+
+        event["request"] = deepcopy(request_info)
+
+    def content_length(self):
+        # type: () -> int
+        try:
+            return int(self.env().get("CONTENT_LENGTH", 0))
+        except ValueError:
+            return 0
+
+    def cookies(self):
+        # type: () -> MutableMapping[str, Any]
+        raise NotImplementedError()
+
+    def raw_data(self):
+        # type: () -> Optional[Union[str, bytes]]
+        raise NotImplementedError()
+
+    def form(self):
+        # type: () -> Optional[Dict[str, Any]]
+        raise NotImplementedError()
+
+    def parsed_body(self):
+        # type: () -> Optional[Dict[str, Any]]
+        try:
+            form = self.form()
+        except Exception:
+            form = None
+        try:
+            files = self.files()
+        except Exception:
+            files = None
+
+        if form or files:
+            data = {}
+            if form:
+                data = dict(form.items())
+            if files:
+                for key in files.keys():
+                    data[key] = AnnotatedValue.removed_because_raw_data()
+
+            return data
+
+        return self.json()
+
+    def is_json(self):
+        # type: () -> bool
+        return _is_json_content_type(self.env().get("CONTENT_TYPE"))
+
+    def json(self):
+        # type: () -> Optional[Any]
+        try:
+            if not self.is_json():
+                return None
+
+            try:
+                raw_data = self.raw_data()
+            except (RawPostDataException, ValueError):
+                # The body might have already been read, in which case this will
+                # fail
+                raw_data = None
+
+            if raw_data is None:
+                return None
+
+            if isinstance(raw_data, str):
+                return json.loads(raw_data)
+            else:
+                return json.loads(raw_data.decode("utf-8"))
+        except ValueError:
+            pass
+
+        return None
+
+    def files(self):
+        # type: () -> Optional[Dict[str, Any]]
+        raise NotImplementedError()
+
+    def size_of_file(self, file):
+        # type: (Any) -> int
+        raise NotImplementedError()
+
+    def env(self):
+        # type: () -> Dict[str, Any]
+        raise NotImplementedError()
+
+
+def _is_json_content_type(ct):
+    # type: (Optional[str]) -> bool
+    mt = (ct or "").split(";", 1)[0]
+    return mt == "application/json" or (
+        mt.startswith("application/") and mt.endswith("+json")
+    )
+
+
+def _filter_headers(headers):
+    # type: (Mapping[str, str]) -> Mapping[str, Union[AnnotatedValue, str]]
+    if should_send_default_pii():
+        return headers
+
+    return {
+        k: (
+            v
+            if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
+            else AnnotatedValue.removed_because_over_size_limit()
+        )
+        for k, v in headers.items()
+    }
+
+
+def _in_http_status_code_range(code, code_ranges):
+    # type: (object, list[HttpStatusCodeRange]) -> bool
+    for target in code_ranges:
+        if isinstance(target, int):
+            if code == target:
+                return True
+            continue
+
+        try:
+            if code in target:
+                return True
+        except TypeError:
+            logger.warning(
+                "failed_request_status_codes has to be a list of integers or containers"
+            )
+
+    return False
+
+
+class HttpCodeRangeContainer:
+    """
+    Wrapper to make it possible to use list[HttpStatusCodeRange] as a Container[int].
+    Used for backwards compatibility with the old `failed_request_status_codes` option.
+    """
+
+    def __init__(self, code_ranges):
+        # type: (list[HttpStatusCodeRange]) -> None
+        self._code_ranges = code_ranges
+
+    def __contains__(self, item):
+        # type: (object) -> bool
+        return _in_http_status_code_range(item, self._code_ranges)
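
The content-type and status-code helpers in _wsgi_common.py can be exercised directly;
a short sketch (the example values are made up):

    from sentry_sdk.integrations._wsgi_common import (
        HttpCodeRangeContainer,
        _is_json_content_type,
    )

    # JSON detection covers the plain type and "+json" vendor suffixes.
    assert _is_json_content_type("application/json; charset=utf-8")
    assert _is_json_content_type("application/vnd.api+json")
    assert not _is_json_content_type("text/html")

    # A list mixing single codes and ranges, wrapped so it supports `in`,
    # as used for the legacy failed_request_status_codes option.
    codes = HttpCodeRangeContainer([403, range(500, 600)])
    assert 403 in codes
    assert 503 in codes
    assert 404 not in codes
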
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/aiohttp.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/aiohttp.py
new file mode 100644
index 00000000..ad3202bf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/aiohttp.py
@@ -0,0 +1,357 @@
+import sys
+import weakref
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA
+from sentry_sdk.integrations import (
+    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
+    _check_minimum_version,
+    Integration,
+    DidNotEnable,
+)
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.sessions import track_session
+from sentry_sdk.integrations._wsgi_common import (
+    _filter_headers,
+    request_body_within_bounds,
+)
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SOURCE_FOR_STYLE,
+    TransactionSource,
+)
+from sentry_sdk.tracing_utils import should_propagate_trace
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    logger,
+    parse_url,
+    parse_version,
+    reraise,
+    transaction_from_function,
+    HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
+    SENSITIVE_DATA_SUBSTITUTE,
+    AnnotatedValue,
+)
+
+try:
+    import asyncio
+
+    from aiohttp import __version__ as AIOHTTP_VERSION
+    from aiohttp import ClientSession, TraceConfig
+    from aiohttp.web import Application, HTTPException, UrlDispatcher
+except ImportError:
+    raise DidNotEnable("AIOHTTP not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from aiohttp.web_request import Request
+    from aiohttp.web_urldispatcher import UrlMappingMatchInfo
+    from aiohttp import TraceRequestStartParams, TraceRequestEndParams
+
+    from collections.abc import Set
+    from types import SimpleNamespace
+    from typing import Any
+    from typing import Optional
+    from typing import Tuple
+    from typing import Union
+
+    from sentry_sdk.utils import ExcInfo
+    from sentry_sdk._types import Event, EventProcessor
+
+
+TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern")
+
+
+class AioHttpIntegration(Integration):
+    identifier = "aiohttp"
+    origin = f"auto.http.{identifier}"
+
+    def __init__(
+        self,
+        transaction_style="handler_name",  # type: str
+        *,
+        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
+    ):
+        # type: (...) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+        self._failed_request_status_codes = failed_request_status_codes
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        version = parse_version(AIOHTTP_VERSION)
+        _check_minimum_version(AioHttpIntegration, version)
+
+        if not HAS_REAL_CONTEXTVARS:
+            # We better have contextvars or we're going to leak state between
+            # requests.
+            raise DidNotEnable(
+                "The aiohttp integration for Sentry requires Python 3.7+ "
+                "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
+            )
+
+        ignore_logger("aiohttp.server")
+
+        old_handle = Application._handle
+
+        async def sentry_app_handle(self, request, *args, **kwargs):
+            # type: (Any, Request, *Any, **Any) -> Any
+            integration = sentry_sdk.get_client().get_integration(AioHttpIntegration)
+            if integration is None:
+                return await old_handle(self, request, *args, **kwargs)
+
+            weak_request = weakref.ref(request)
+
+            with sentry_sdk.isolation_scope() as scope:
+                with track_session(scope, session_mode="request"):
+                    # Scope data will not leak between requests because aiohttp
+                    # creates a task to wrap each request.
+                    scope.generate_propagation_context()
+                    scope.clear_breadcrumbs()
+                    scope.add_event_processor(_make_request_processor(weak_request))
+
+                    headers = dict(request.headers)
+                    transaction = continue_trace(
+                        headers,
+                        op=OP.HTTP_SERVER,
+                        # If this transaction name makes it to the UI, AIOHTTP's
+                        # URL resolver did not find a route or died trying.
+                        name="generic AIOHTTP request",
+                        source=TransactionSource.ROUTE,
+                        origin=AioHttpIntegration.origin,
+                    )
+                    with sentry_sdk.start_transaction(
+                        transaction,
+                        custom_sampling_context={"aiohttp_request": request},
+                    ):
+                        try:
+                            response = await old_handle(self, request)
+                        except HTTPException as e:
+                            transaction.set_http_status(e.status_code)
+
+                            if (
+                                e.status_code
+                                in integration._failed_request_status_codes
+                            ):
+                                _capture_exception()
+
+                            raise
+                        except (asyncio.CancelledError, ConnectionResetError):
+                            transaction.set_status(SPANSTATUS.CANCELLED)
+                            raise
+                        except Exception:
+                            # This will probably map to a 500 but seems like we
+                            # have no way to tell. Do not set span status.
+                            reraise(*_capture_exception())
+
+                        try:
+                            # A valid response handler will return a valid response with a status. But, if the handler
+                            # returns an invalid response (e.g. None), the line below will raise an AttributeError.
+                            # Even though this is likely invalid, we need to handle this case to ensure we don't break
+                            # the application.
+                            response_status = response.status
+                        except AttributeError:
+                            pass
+                        else:
+                            transaction.set_http_status(response_status)
+
+                        return response
+
+        Application._handle = sentry_app_handle
+
+        old_urldispatcher_resolve = UrlDispatcher.resolve
+
+        @wraps(old_urldispatcher_resolve)
+        async def sentry_urldispatcher_resolve(self, request):
+            # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo
+            rv = await old_urldispatcher_resolve(self, request)
+
+            integration = sentry_sdk.get_client().get_integration(AioHttpIntegration)
+            if integration is None:
+                return rv
+
+            name = None
+
+            try:
+                if integration.transaction_style == "handler_name":
+                    name = transaction_from_function(rv.handler)
+                elif integration.transaction_style == "method_and_path_pattern":
+                    route_info = rv.get_info()
+                    pattern = route_info.get("path") or route_info.get("formatter")
+                    name = "{} {}".format(request.method, pattern)
+            except Exception:
+                pass
+
+            if name is not None:
+                sentry_sdk.get_current_scope().set_transaction_name(
+                    name,
+                    source=SOURCE_FOR_STYLE[integration.transaction_style],
+                )
+
+            return rv
+
+        UrlDispatcher.resolve = sentry_urldispatcher_resolve
+
+        old_client_session_init = ClientSession.__init__
+
+        @ensure_integration_enabled(AioHttpIntegration, old_client_session_init)
+        def init(*args, **kwargs):
+            # type: (Any, Any) -> None
+            client_trace_configs = list(kwargs.get("trace_configs") or ())
+            trace_config = create_trace_config()
+            client_trace_configs.append(trace_config)
+
+            kwargs["trace_configs"] = client_trace_configs
+            return old_client_session_init(*args, **kwargs)
+
+        ClientSession.__init__ = init
+
+
+def create_trace_config():
+    # type: () -> TraceConfig
+
+    async def on_request_start(session, trace_config_ctx, params):
+        # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None
+        if sentry_sdk.get_client().get_integration(AioHttpIntegration) is None:
+            return
+
+        method = params.method.upper()
+
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(params.url), sanitize=False)
+
+        span = sentry_sdk.start_span(
+            op=OP.HTTP_CLIENT,
+            name="%s %s"
+            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+            origin=AioHttpIntegration.origin,
+        )
+        span.set_data(SPANDATA.HTTP_METHOD, method)
+        if parsed_url is not None:
+            span.set_data("url", parsed_url.url)
+            span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+            span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+        client = sentry_sdk.get_client()
+
+        if should_propagate_trace(client, str(params.url)):
+            for (
+                key,
+                value,
+            ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(
+                span=span
+            ):
+                logger.debug(
+                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                        key=key, value=value, url=params.url
+                    )
+                )
+                if key == BAGGAGE_HEADER_NAME and params.headers.get(
+                    BAGGAGE_HEADER_NAME
+                ):
+                    # do not overwrite any existing baggage, just append to it
+                    params.headers[key] += "," + value
+                else:
+                    params.headers[key] = value
+
+        trace_config_ctx.span = span
+
+    async def on_request_end(session, trace_config_ctx, params):
+        # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None
+        if trace_config_ctx.span is None:
+            return
+
+        span = trace_config_ctx.span
+        span.set_http_status(int(params.response.status))
+        span.set_data("reason", params.response.reason)
+        span.finish()
+
+    trace_config = TraceConfig()
+
+    trace_config.on_request_start.append(on_request_start)
+    trace_config.on_request_end.append(on_request_end)
+
+    return trace_config
+
+
+def _make_request_processor(weak_request):
+    # type: (weakref.ReferenceType[Request]) -> EventProcessor
+    def aiohttp_processor(
+        event,  # type: Event
+        hint,  # type: dict[str, Tuple[type, BaseException, Any]]
+    ):
+        # type: (...) -> Event
+        request = weak_request()
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            request_info = event.setdefault("request", {})
+
+            request_info["url"] = "%s://%s%s" % (
+                request.scheme,
+                request.host,
+                request.path,
+            )
+
+            request_info["query_string"] = request.query_string
+            request_info["method"] = request.method
+            request_info["env"] = {"REMOTE_ADDR": request.remote}
+            request_info["headers"] = _filter_headers(dict(request.headers))
+
+            # Just attach raw data here if it is within bounds, if available.
+            # Unfortunately there's no way to get structured data from aiohttp
+            # without awaiting on some coroutine.
+            request_info["data"] = get_aiohttp_request_data(request)
+
+        return event
+
+    return aiohttp_processor
+
+
+def _capture_exception():
+    # type: () -> ExcInfo
+    exc_info = sys.exc_info()
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "aiohttp", "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+    return exc_info
+
+
+BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]"
+
+
+def get_aiohttp_request_data(request):
+    # type: (Request) -> Union[Optional[str], AnnotatedValue]
+    bytes_body = request._read_bytes
+
+    if bytes_body is not None:
+        # we have body to show
+        if not request_body_within_bounds(sentry_sdk.get_client(), len(bytes_body)):
+            return AnnotatedValue.removed_because_over_size_limit()
+
+        encoding = request.charset or "utf-8"
+        return bytes_body.decode(encoding, "replace")
+
+    if request.can_read_body:
+        # body exists but we can't show it
+        return BODY_NOT_READ_MESSAGE
+
+    # request has no body
+    return None
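
Enabling the aiohttp integration with the two options it exposes, transaction_style and
failed_request_status_codes, happens at init time. A sketch (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.aiohttp import AioHttpIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        integrations=[
            AioHttpIntegration(
                # Name transactions "GET /items/{id}" instead of by handler function.
                transaction_style="method_and_path_pattern",
                # Also capture 403 responses, in addition to the default 500-599 range.
                failed_request_status_codes={403, *range(500, 600)},
            )
        ],
    )
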
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/anthropic.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/anthropic.py
new file mode 100644
index 00000000..4cb54309
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/anthropic.py
@@ -0,0 +1,288 @@
+from functools import wraps
+from typing import TYPE_CHECKING
+
+import sentry_sdk
+from sentry_sdk.ai.monitoring import record_token_usage
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    package_version,
+)
+
+try:
+    from anthropic.resources import AsyncMessages, Messages
+
+    if TYPE_CHECKING:
+        from anthropic.types import MessageStreamEvent
+except ImportError:
+    raise DidNotEnable("Anthropic not installed")
+
+if TYPE_CHECKING:
+    from typing import Any, AsyncIterator, Iterator
+    from sentry_sdk.tracing import Span
+
+
+class AnthropicIntegration(Integration):
+    identifier = "anthropic"
+    origin = f"auto.ai.{identifier}"
+
+    def __init__(self, include_prompts=True):
+        # type: (AnthropicIntegration, bool) -> None
+        self.include_prompts = include_prompts
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = package_version("anthropic")
+        _check_minimum_version(AnthropicIntegration, version)
+
+        Messages.create = _wrap_message_create(Messages.create)
+        AsyncMessages.create = _wrap_message_create_async(AsyncMessages.create)
+
+
+def _capture_exception(exc):
+    # type: (Any) -> None
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "anthropic", "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _calculate_token_usage(result, span):
+    # type: (Messages, Span) -> None
+    input_tokens = 0
+    output_tokens = 0
+    if hasattr(result, "usage"):
+        usage = result.usage
+        if hasattr(usage, "input_tokens") and isinstance(usage.input_tokens, int):
+            input_tokens = usage.input_tokens
+        if hasattr(usage, "output_tokens") and isinstance(usage.output_tokens, int):
+            output_tokens = usage.output_tokens
+
+    total_tokens = input_tokens + output_tokens
+    record_token_usage(span, input_tokens, output_tokens, total_tokens)
+
+
+def _get_responses(content):
+    # type: (list[Any]) -> list[dict[str, Any]]
+    """
+    Get JSON of an Anthropic response's content.
+    """
+    responses = []
+    for item in content:
+        if hasattr(item, "text"):
+            responses.append(
+                {
+                    "type": item.type,
+                    "text": item.text,
+                }
+            )
+    return responses
+
+
+def _collect_ai_data(event, input_tokens, output_tokens, content_blocks):
+    # type: (MessageStreamEvent, int, int, list[str]) -> tuple[int, int, list[str]]
+    """
+    Count token usage and collect content blocks from the AI streaming response.
+    """
+    with capture_internal_exceptions():
+        if hasattr(event, "type"):
+            if event.type == "message_start":
+                usage = event.message.usage
+                input_tokens += usage.input_tokens
+                output_tokens += usage.output_tokens
+            elif event.type == "content_block_start":
+                pass
+            elif event.type == "content_block_delta":
+                if hasattr(event.delta, "text"):
+                    content_blocks.append(event.delta.text)
+                elif hasattr(event.delta, "partial_json"):
+                    content_blocks.append(event.delta.partial_json)
+            elif event.type == "content_block_stop":
+                pass
+            elif event.type == "message_delta":
+                output_tokens += event.usage.output_tokens
+
+    return input_tokens, output_tokens, content_blocks
+
+
+def _add_ai_data_to_span(
+    span, integration, input_tokens, output_tokens, content_blocks
+):
+    # type: (Span, AnthropicIntegration, int, int, list[str]) -> None
+    """
+    Add token usage and content blocks from the AI streaming response to the span.
+    """
+    with capture_internal_exceptions():
+        if should_send_default_pii() and integration.include_prompts:
+            complete_message = "".join(content_blocks)
+            span.set_data(
+                SPANDATA.AI_RESPONSES,
+                [{"type": "text", "text": complete_message}],
+            )
+        total_tokens = input_tokens + output_tokens
+        record_token_usage(span, input_tokens, output_tokens, total_tokens)
+        span.set_data(SPANDATA.AI_STREAMING, True)
+
+
+def _sentry_patched_create_common(f, *args, **kwargs):
+    # type: (Any, *Any, **Any) -> Any
+    integration = kwargs.pop("integration")
+    if integration is None:
+        return f(*args, **kwargs)
+
+    if "messages" not in kwargs:
+        return f(*args, **kwargs)
+
+    try:
+        iter(kwargs["messages"])
+    except TypeError:
+        return f(*args, **kwargs)
+
+    span = sentry_sdk.start_span(
+        op=OP.ANTHROPIC_MESSAGES_CREATE,
+        description="Anthropic messages create",
+        origin=AnthropicIntegration.origin,
+    )
+    span.__enter__()
+
+    result = yield f, args, kwargs
+
+    # add data to span and finish it
+    messages = list(kwargs["messages"])
+    model = kwargs.get("model")
+
+    with capture_internal_exceptions():
+        span.set_data(SPANDATA.AI_MODEL_ID, model)
+        span.set_data(SPANDATA.AI_STREAMING, False)
+
+        if should_send_default_pii() and integration.include_prompts:
+            span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages)
+
+        if hasattr(result, "content"):
+            if should_send_default_pii() and integration.include_prompts:
+                span.set_data(SPANDATA.AI_RESPONSES, _get_responses(result.content))
+            _calculate_token_usage(result, span)
+            span.__exit__(None, None, None)
+
+        # Streaming response
+        elif hasattr(result, "_iterator"):
+            old_iterator = result._iterator
+
+            def new_iterator():
+                # type: () -> Iterator[MessageStreamEvent]
+                input_tokens = 0
+                output_tokens = 0
+                content_blocks = []  # type: list[str]
+
+                for event in old_iterator:
+                    input_tokens, output_tokens, content_blocks = _collect_ai_data(
+                        event, input_tokens, output_tokens, content_blocks
+                    )
+                    if event.type != "message_stop":
+                        yield event
+
+                _add_ai_data_to_span(
+                    span, integration, input_tokens, output_tokens, content_blocks
+                )
+                span.__exit__(None, None, None)
+
+            async def new_iterator_async():
+                # type: () -> AsyncIterator[MessageStreamEvent]
+                input_tokens = 0
+                output_tokens = 0
+                content_blocks = []  # type: list[str]
+
+                async for event in old_iterator:
+                    input_tokens, output_tokens, content_blocks = _collect_ai_data(
+                        event, input_tokens, output_tokens, content_blocks
+                    )
+                    if event.type != "message_stop":
+                        yield event
+
+                _add_ai_data_to_span(
+                    span, integration, input_tokens, output_tokens, content_blocks
+                )
+                span.__exit__(None, None, None)
+
+            if str(type(result._iterator)) == "<class 'async_generator'>":
+                result._iterator = new_iterator_async()
+            else:
+                result._iterator = new_iterator()
+
+        else:
+            span.set_data("unknown_response", True)
+            span.__exit__(None, None, None)
+
+    return result
+
+
+def _wrap_message_create(f):
+    # type: (Any) -> Any
+    def _execute_sync(f, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        gen = _sentry_patched_create_common(f, *args, **kwargs)
+
+        try:
+            f, args, kwargs = next(gen)
+        except StopIteration as e:
+            return e.value
+
+        try:
+            try:
+                result = f(*args, **kwargs)
+            except Exception as exc:
+                _capture_exception(exc)
+                raise exc from None
+
+            return gen.send(result)
+        except StopIteration as e:
+            return e.value
+
+    @wraps(f)
+    def _sentry_patched_create_sync(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)
+        kwargs["integration"] = integration
+
+        return _execute_sync(f, *args, **kwargs)
+
+    return _sentry_patched_create_sync
+
+
+def _wrap_message_create_async(f):
+    # type: (Any) -> Any
+    async def _execute_async(f, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        gen = _sentry_patched_create_common(f, *args, **kwargs)
+
+        try:
+            f, args, kwargs = next(gen)
+        except StopIteration as e:
+            return await e.value
+
+        try:
+            try:
+                result = await f(*args, **kwargs)
+            except Exception as exc:
+                _capture_exception(exc)
+                raise exc from None
+
+            return gen.send(result)
+        except StopIteration as e:
+            return e.value
+
+    @wraps(f)
+    async def _sentry_patched_create_async(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)
+        kwargs["integration"] = integration
+
+        return await _execute_async(f, *args, **kwargs)
+
+    return _sentry_patched_create_async
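
The Anthropic integration takes a single option, include_prompts. Prompts and responses
are attached to spans only when both send_default_pii and include_prompts are enabled;
token counts are recorded either way. A sketch (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.anthropic import AnthropicIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        send_default_pii=True,
        # Keep message bodies off the spans even though PII is otherwise allowed.
        integrations=[AnthropicIntegration(include_prompts=False)],
    )
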
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/argv.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/argv.py
new file mode 100644
index 00000000..315feefb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/argv.py
@@ -0,0 +1,31 @@
+import sys
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+
+    from sentry_sdk._types import Event, Hint
+
+
+class ArgvIntegration(Integration):
+    identifier = "argv"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @add_global_event_processor
+        def processor(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if sentry_sdk.get_client().get_integration(ArgvIntegration) is not None:
+                extra = event.setdefault("extra", {})
+                # If some event processor decided to set extra to e.g. an
+                # `int`, don't crash. Not here.
+                if isinstance(extra, dict):
+                    extra["sys.argv"] = sys.argv
+
+            return event
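A short sketch of the effect of `ArgvIntegration`, using a placeholder DSN (the integration is typically enabled by default, so listing it explicitly is only for illustration):

    import sentry_sdk
    from sentry_sdk.integrations.argv import ArgvIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[ArgvIntegration()],
    )

    # Every captured event now carries the command line under extra["sys.argv"],
    # added by the global event processor registered in setup_once().
    sentry_sdk.capture_message("hello")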
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/ariadne.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/ariadne.py
new file mode 100644
index 00000000..1a95bc01
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/ariadne.py
@@ -0,0 +1,161 @@
+from importlib import import_module
+
+import sentry_sdk
+from sentry_sdk import get_client, capture_event
+from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations._wsgi_common import request_body_within_bounds
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    package_version,
+)
+
+try:
+    # importing like this is necessary due to name shadowing in ariadne
+    # (ariadne.graphql is also a function)
+    ariadne_graphql = import_module("ariadne.graphql")
+except ImportError:
+    raise DidNotEnable("ariadne is not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, List, Optional
+    from ariadne.types import GraphQLError, GraphQLResult, GraphQLSchema, QueryParser  # type: ignore
+    from graphql.language.ast import DocumentNode
+    from sentry_sdk._types import Event, EventProcessor
+
+
+class AriadneIntegration(Integration):
+    identifier = "ariadne"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = package_version("ariadne")
+        _check_minimum_version(AriadneIntegration, version)
+
+        ignore_logger("ariadne")
+
+        _patch_graphql()
+
+
+def _patch_graphql():
+    # type: () -> None
+    old_parse_query = ariadne_graphql.parse_query
+    old_handle_errors = ariadne_graphql.handle_graphql_errors
+    old_handle_query_result = ariadne_graphql.handle_query_result
+
+    @ensure_integration_enabled(AriadneIntegration, old_parse_query)
+    def _sentry_patched_parse_query(context_value, query_parser, data):
+        # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode
+        event_processor = _make_request_event_processor(data)
+        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)
+
+        result = old_parse_query(context_value, query_parser, data)
+        return result
+
+    @ensure_integration_enabled(AriadneIntegration, old_handle_errors)
+    def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs):
+        # type: (List[GraphQLError], Any, Any) -> GraphQLResult
+        result = old_handle_errors(errors, *args, **kwargs)
+
+        event_processor = _make_response_event_processor(result[1])
+        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)
+
+        client = get_client()
+        if client.is_active():
+            with capture_internal_exceptions():
+                for error in errors:
+                    event, hint = event_from_exception(
+                        error,
+                        client_options=client.options,
+                        mechanism={
+                            "type": AriadneIntegration.identifier,
+                            "handled": False,
+                        },
+                    )
+                    capture_event(event, hint=hint)
+
+        return result
+
+    @ensure_integration_enabled(AriadneIntegration, old_handle_query_result)
+    def _sentry_patched_handle_query_result(result, *args, **kwargs):
+        # type: (Any, Any, Any) -> GraphQLResult
+        query_result = old_handle_query_result(result, *args, **kwargs)
+
+        event_processor = _make_response_event_processor(query_result[1])
+        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)
+
+        client = get_client()
+        if client.is_active():
+            with capture_internal_exceptions():
+                for error in result.errors or []:
+                    event, hint = event_from_exception(
+                        error,
+                        client_options=client.options,
+                        mechanism={
+                            "type": AriadneIntegration.identifier,
+                            "handled": False,
+                        },
+                    )
+                    capture_event(event, hint=hint)
+
+        return query_result
+
+    ariadne_graphql.parse_query = _sentry_patched_parse_query  # type: ignore
+    ariadne_graphql.handle_graphql_errors = _sentry_patched_handle_graphql_errors  # type: ignore
+    ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result  # type: ignore
+
+
+def _make_request_event_processor(data):
+    # type: (GraphQLSchema) -> EventProcessor
+    """Add request data and api_target to events."""
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        if not isinstance(data, dict):
+            return event
+
+        with capture_internal_exceptions():
+            try:
+                content_length = int(
+                    (data.get("headers") or {}).get("Content-Length", 0)
+                )
+            except (TypeError, ValueError):
+                return event
+
+            if should_send_default_pii() and request_body_within_bounds(
+                get_client(), content_length
+            ):
+                request_info = event.setdefault("request", {})
+                request_info["api_target"] = "graphql"
+                request_info["data"] = data
+
+            elif event.get("request", {}).get("data"):
+                del event["request"]["data"]
+
+        return event
+
+    return inner
+
+
+def _make_response_event_processor(response):
+    # type: (Dict[str, Any]) -> EventProcessor
+    """Add response data to the event's response context."""
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        with capture_internal_exceptions():
+            if should_send_default_pii() and response.get("errors"):
+                contexts = event.setdefault("contexts", {})
+                contexts["response"] = {
+                    "data": response,
+                }
+
+        return event
+
+    return inner
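A configuration sketch with a placeholder DSN; `send_default_pii=True` is what lets the two event processors above attach GraphQL request and response payloads:

    import sentry_sdk
    from sentry_sdk.integrations.ariadne import AriadneIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[AriadneIntegration()],
        send_default_pii=True,  # gates the should_send_default_pii() checks above
    )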
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/arq.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/arq.py
new file mode 100644
index 00000000..1ea8e32f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/arq.py
@@ -0,0 +1,246 @@
+import sys
+
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANSTATUS
+from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import Transaction, TransactionSource
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    SENSITIVE_DATA_SUBSTITUTE,
+    parse_version,
+    reraise,
+)
+
+try:
+    import arq.worker
+    from arq.version import VERSION as ARQ_VERSION
+    from arq.connections import ArqRedis
+    from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
+except ImportError:
+    raise DidNotEnable("Arq is not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Optional, Union
+
+    from sentry_sdk._types import EventProcessor, Event, ExcInfo, Hint
+
+    from arq.cron import CronJob
+    from arq.jobs import Job
+    from arq.typing import WorkerCoroutine
+    from arq.worker import Function
+
+ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)
+
+
+class ArqIntegration(Integration):
+    identifier = "arq"
+    origin = f"auto.queue.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        try:
+            if isinstance(ARQ_VERSION, str):
+                version = parse_version(ARQ_VERSION)
+            else:
+                version = ARQ_VERSION.version[:2]
+
+        except (TypeError, ValueError):
+            version = None
+
+        _check_minimum_version(ArqIntegration, version)
+
+        patch_enqueue_job()
+        patch_run_job()
+        patch_create_worker()
+
+        ignore_logger("arq.worker")
+
+
+def patch_enqueue_job():
+    # type: () -> None
+    old_enqueue_job = ArqRedis.enqueue_job
+    original_kwdefaults = old_enqueue_job.__kwdefaults__
+
+    async def _sentry_enqueue_job(self, function, *args, **kwargs):
+        # type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
+        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
+        if integration is None:
+            return await old_enqueue_job(self, function, *args, **kwargs)
+
+        with sentry_sdk.start_span(
+            op=OP.QUEUE_SUBMIT_ARQ, name=function, origin=ArqIntegration.origin
+        ):
+            return await old_enqueue_job(self, function, *args, **kwargs)
+
+    _sentry_enqueue_job.__kwdefaults__ = original_kwdefaults
+    ArqRedis.enqueue_job = _sentry_enqueue_job
+
+
+def patch_run_job():
+    # type: () -> None
+    old_run_job = Worker.run_job
+
+    async def _sentry_run_job(self, job_id, score):
+        # type: (Worker, str, int) -> None
+        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
+        if integration is None:
+            return await old_run_job(self, job_id, score)
+
+        with sentry_sdk.isolation_scope() as scope:
+            scope._name = "arq"
+            scope.clear_breadcrumbs()
+
+            transaction = Transaction(
+                name="unknown arq task",
+                status="ok",
+                op=OP.QUEUE_TASK_ARQ,
+                source=TransactionSource.TASK,
+                origin=ArqIntegration.origin,
+            )
+
+            with sentry_sdk.start_transaction(transaction):
+                return await old_run_job(self, job_id, score)
+
+    Worker.run_job = _sentry_run_job
+
+
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    scope = sentry_sdk.get_current_scope()
+
+    if scope.transaction is not None:
+        if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
+            scope.transaction.set_status(SPANSTATUS.ABORTED)
+            return
+
+        scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR)
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": ArqIntegration.identifier, "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _make_event_processor(ctx, *args, **kwargs):
+    # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        with capture_internal_exceptions():
+            scope = sentry_sdk.get_current_scope()
+            if scope.transaction is not None:
+                scope.transaction.name = ctx["job_name"]
+                event["transaction"] = ctx["job_name"]
+
+            tags = event.setdefault("tags", {})
+            tags["arq_task_id"] = ctx["job_id"]
+            tags["arq_task_retry"] = ctx["job_try"] > 1
+            extra = event.setdefault("extra", {})
+            extra["arq-job"] = {
+                "task": ctx["job_name"],
+                "args": (
+                    args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
+                ),
+                "kwargs": (
+                    kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
+                ),
+                "retry": ctx["job_try"],
+            }
+
+        return event
+
+    return event_processor
+
+
+def _wrap_coroutine(name, coroutine):
+    # type: (str, WorkerCoroutine) -> WorkerCoroutine
+
+    async def _sentry_coroutine(ctx, *args, **kwargs):
+        # type: (Dict[Any, Any], *Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
+        if integration is None:
+            return await coroutine(ctx, *args, **kwargs)
+
+        sentry_sdk.get_isolation_scope().add_event_processor(
+            _make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
+        )
+
+        try:
+            result = await coroutine(ctx, *args, **kwargs)
+        except Exception:
+            exc_info = sys.exc_info()
+            _capture_exception(exc_info)
+            reraise(*exc_info)
+
+        return result
+
+    return _sentry_coroutine
+
+
+def patch_create_worker():
+    # type: () -> None
+    old_create_worker = arq.worker.create_worker
+
+    @ensure_integration_enabled(ArqIntegration, old_create_worker)
+    def _sentry_create_worker(*args, **kwargs):
+        # type: (*Any, **Any) -> Worker
+        settings_cls = args[0]
+
+        if isinstance(settings_cls, dict):
+            if "functions" in settings_cls:
+                settings_cls["functions"] = [
+                    _get_arq_function(func)
+                    for func in settings_cls.get("functions", [])
+                ]
+            if "cron_jobs" in settings_cls:
+                settings_cls["cron_jobs"] = [
+                    _get_arq_cron_job(cron_job)
+                    for cron_job in settings_cls.get("cron_jobs", [])
+                ]
+
+        if hasattr(settings_cls, "functions"):
+            settings_cls.functions = [
+                _get_arq_function(func) for func in settings_cls.functions
+            ]
+        if hasattr(settings_cls, "cron_jobs"):
+            settings_cls.cron_jobs = [
+                _get_arq_cron_job(cron_job) for cron_job in settings_cls.cron_jobs
+            ]
+
+        if "functions" in kwargs:
+            kwargs["functions"] = [
+                _get_arq_function(func) for func in kwargs.get("functions", [])
+            ]
+        if "cron_jobs" in kwargs:
+            kwargs["cron_jobs"] = [
+                _get_arq_cron_job(cron_job) for cron_job in kwargs.get("cron_jobs", [])
+            ]
+
+        return old_create_worker(*args, **kwargs)
+
+    arq.worker.create_worker = _sentry_create_worker
+
+
+def _get_arq_function(func):
+    # type: (Union[str, Function, WorkerCoroutine]) -> Function
+    arq_func = arq.worker.func(func)
+    arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine)
+
+    return arq_func
+
+
+def _get_arq_cron_job(cron_job):
+    # type: (CronJob) -> CronJob
+    cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine)
+
+    return cron_job
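A worker-side sketch with a placeholder DSN; `sentry_sdk.init` must run in the worker process so `patch_create_worker()` can wrap each job coroutine (the `WorkerSettings` class follows the usual arq convention):

    import sentry_sdk
    from sentry_sdk.integrations.arq import ArqIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[ArqIntegration()],
        traces_sample_rate=1.0,
    )

    async def download_content(ctx, url):
        # runs wrapped by _wrap_coroutine(), so failures are captured
        ...

    class WorkerSettings:
        functions = [download_content]  # rewritten by _sentry_create_worker above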
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asgi.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asgi.py
new file mode 100644
index 00000000..3569336a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asgi.py
@@ -0,0 +1,337 @@
+"""
+An ASGI middleware.
+
+Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`.
+"""
+
+import asyncio
+import inspect
+from copy import deepcopy
+from functools import partial
+
+import sentry_sdk
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
+
+from sentry_sdk.integrations._asgi_common import (
+    _get_headers,
+    _get_request_data,
+    _get_url,
+)
+from sentry_sdk.integrations._wsgi_common import (
+    DEFAULT_HTTP_METHODS_TO_CAPTURE,
+    nullcontext,
+)
+from sentry_sdk.sessions import track_session
+from sentry_sdk.tracing import (
+    SOURCE_FOR_STYLE,
+    TransactionSource,
+)
+from sentry_sdk.utils import (
+    ContextVar,
+    event_from_exception,
+    HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
+    logger,
+    transaction_from_function,
+    _get_installed_modules,
+)
+from sentry_sdk.tracing import Transaction
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+    from typing import Optional
+    from typing import Tuple
+
+    from sentry_sdk._types import Event, Hint
+
+
+_asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")
+
+_DEFAULT_TRANSACTION_NAME = "generic ASGI request"
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+def _capture_exception(exc, mechanism_type="asgi"):
+    # type: (Any, str) -> None
+
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": mechanism_type, "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _looks_like_asgi3(app):
+    # type: (Any) -> bool
+    """
+    Try to figure out if an application object supports ASGI3.
+
+    This is how uvicorn figures out the application version as well.
+    """
+    if inspect.isclass(app):
+        return hasattr(app, "__await__")
+    elif inspect.isfunction(app):
+        return asyncio.iscoroutinefunction(app)
+    else:
+        call = getattr(app, "__call__", None)  # noqa
+        return asyncio.iscoroutinefunction(call)
+
+
+class SentryAsgiMiddleware:
+    __slots__ = (
+        "app",
+        "__call__",
+        "transaction_style",
+        "mechanism_type",
+        "span_origin",
+        "http_methods_to_capture",
+    )
+
+    def __init__(
+        self,
+        app,  # type: Any
+        unsafe_context_data=False,  # type: bool
+        transaction_style="endpoint",  # type: str
+        mechanism_type="asgi",  # type: str
+        span_origin="manual",  # type: str
+        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: Tuple[str, ...]
+    ):
+        # type: (...) -> None
+        """
+        Instrument an ASGI application with Sentry. Attaches HTTP/WebSocket
+        request data to emitted events and provides basic handling for
+        exceptions bubbling up through the middleware.
+
+        :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default.
+        """
+        if not unsafe_context_data and not HAS_REAL_CONTEXTVARS:
+            # We better have contextvars or we're going to leak state between
+            # requests.
+            raise RuntimeError(
+                "The ASGI middleware for Sentry requires Python 3.7+ "
+                "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
+            )
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+
+        asgi_middleware_while_using_starlette_or_fastapi = (
+            mechanism_type == "asgi" and "starlette" in _get_installed_modules()
+        )
+        if asgi_middleware_while_using_starlette_or_fastapi:
+            logger.warning(
+                "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. "
+                "Please remove 'SentryAsgiMiddleware' from your project. "
+                "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information."
+            )
+
+        self.transaction_style = transaction_style
+        self.mechanism_type = mechanism_type
+        self.span_origin = span_origin
+        self.app = app
+        self.http_methods_to_capture = http_methods_to_capture
+
+        if _looks_like_asgi3(app):
+            self.__call__ = self._run_asgi3  # type: Callable[..., Any]
+        else:
+            self.__call__ = self._run_asgi2
+
+    def _run_asgi2(self, scope):
+        # type: (Any) -> Any
+        async def inner(receive, send):
+            # type: (Any, Any) -> Any
+            return await self._run_app(scope, receive, send, asgi_version=2)
+
+        return inner
+
+    async def _run_asgi3(self, scope, receive, send):
+        # type: (Any, Any, Any) -> Any
+        return await self._run_app(scope, receive, send, asgi_version=3)
+
+    async def _run_app(self, scope, receive, send, asgi_version):
+        # type: (Any, Any, Any, int) -> Any
+        is_recursive_asgi_middleware = _asgi_middleware_applied.get(False)
+        is_lifespan = scope["type"] == "lifespan"
+        if is_recursive_asgi_middleware or is_lifespan:
+            try:
+                if asgi_version == 2:
+                    return await self.app(scope)(receive, send)
+                else:
+                    return await self.app(scope, receive, send)
+
+            except Exception as exc:
+                _capture_exception(exc, mechanism_type=self.mechanism_type)
+                raise exc from None
+
+        _asgi_middleware_applied.set(True)
+        try:
+            with sentry_sdk.isolation_scope() as sentry_scope:
+                with track_session(sentry_scope, session_mode="request"):
+                    sentry_scope.clear_breadcrumbs()
+                    sentry_scope._name = "asgi"
+                    processor = partial(self.event_processor, asgi_scope=scope)
+                    sentry_scope.add_event_processor(processor)
+
+                    ty = scope["type"]
+                    (
+                        transaction_name,
+                        transaction_source,
+                    ) = self._get_transaction_name_and_source(
+                        self.transaction_style,
+                        scope,
+                    )
+
+                    method = scope.get("method", "").upper()
+                    transaction = None
+                    if method in self.http_methods_to_capture:
+                        if ty in ("http", "websocket"):
+                            transaction = continue_trace(
+                                _get_headers(scope),
+                                op="{}.server".format(ty),
+                                name=transaction_name,
+                                source=transaction_source,
+                                origin=self.span_origin,
+                            )
+                            logger.debug(
+                                "[ASGI] Created transaction (continuing trace): %s",
+                                transaction,
+                            )
+                        else:
+                            transaction = Transaction(
+                                op=OP.HTTP_SERVER,
+                                name=transaction_name,
+                                source=transaction_source,
+                                origin=self.span_origin,
+                            )
+                            logger.debug(
+                                "[ASGI] Created transaction (new): %s", transaction
+                            )
+
+                        transaction.set_tag("asgi.type", ty)
+                        logger.debug(
+                            "[ASGI] Set transaction name and source on transaction: '%s' / '%s'",
+                            transaction.name,
+                            transaction.source,
+                        )
+
+                    with (
+                        sentry_sdk.start_transaction(
+                            transaction,
+                            custom_sampling_context={"asgi_scope": scope},
+                        )
+                        if transaction is not None
+                        else nullcontext()
+                    ):
+                        logger.debug("[ASGI] Started transaction: %s", transaction)
+                        try:
+
+                            async def _sentry_wrapped_send(event):
+                                # type: (Dict[str, Any]) -> Any
+                                if transaction is not None:
+                                    is_http_response = (
+                                        event.get("type") == "http.response.start"
+                                        and "status" in event
+                                    )
+                                    if is_http_response:
+                                        transaction.set_http_status(event["status"])
+
+                                return await send(event)
+
+                            if asgi_version == 2:
+                                return await self.app(scope)(
+                                    receive, _sentry_wrapped_send
+                                )
+                            else:
+                                return await self.app(
+                                    scope, receive, _sentry_wrapped_send
+                                )
+                        except Exception as exc:
+                            _capture_exception(exc, mechanism_type=self.mechanism_type)
+                            raise exc from None
+        finally:
+            _asgi_middleware_applied.set(False)
+
+    def event_processor(self, event, hint, asgi_scope):
+        # type: (Event, Hint, Any) -> Optional[Event]
+        request_data = event.get("request", {})
+        request_data.update(_get_request_data(asgi_scope))
+        event["request"] = deepcopy(request_data)
+
+        # Only set transaction name if not already set by Starlette or FastAPI (or other frameworks)
+        transaction = event.get("transaction")
+        transaction_source = (event.get("transaction_info") or {}).get("source")
+        already_set = (
+            transaction is not None
+            and transaction != _DEFAULT_TRANSACTION_NAME
+            and transaction_source
+            in [
+                TransactionSource.COMPONENT,
+                TransactionSource.ROUTE,
+                TransactionSource.CUSTOM,
+            ]
+        )
+        if not already_set:
+            name, source = self._get_transaction_name_and_source(
+                self.transaction_style, asgi_scope
+            )
+            event["transaction"] = name
+            event["transaction_info"] = {"source": source}
+
+            logger.debug(
+                "[ASGI] Set transaction name and source in event_processor: '%s' / '%s'",
+                event["transaction"],
+                event["transaction_info"]["source"],
+            )
+
+        return event
+
+    # Helper functions.
+    #
+    # Note: Those functions are not public API. If you want to mutate request
+    # data to your liking it's recommended to use the `before_send` callback
+    # for that.
+
+    def _get_transaction_name_and_source(self, transaction_style, asgi_scope):
+        # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str]
+        name = None
+        source = SOURCE_FOR_STYLE[transaction_style]
+        ty = asgi_scope.get("type")
+
+        if transaction_style == "endpoint":
+            endpoint = asgi_scope.get("endpoint")
+            # Web frameworks like Starlette mutate the ASGI env once routing is
+            # done, which is some time after the request has started. If we have
+            # an endpoint, overwrite our generic transaction name.
+            if endpoint:
+                name = transaction_from_function(endpoint) or ""
+            else:
+                name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
+                source = TransactionSource.URL
+
+        elif transaction_style == "url":
+            # FastAPI includes the route object in the scope to let Sentry extract the
+            # path from it for the transaction name
+            route = asgi_scope.get("route")
+            if route:
+                path = getattr(route, "path", None)
+                if path is not None:
+                    name = path
+            else:
+                name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
+                source = TransactionSource.URL
+
+        if name is None:
+            name = _DEFAULT_TRANSACTION_NAME
+            source = TransactionSource.ROUTE
+            return name, source
+
+        return name, source
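A wrapping sketch for a bare ASGI 3 application, with a placeholder DSN; frameworks such as Starlette or FastAPI have their own integrations and should not add this middleware manually, as the warning in `__init__` notes:

    import sentry_sdk
    from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

    sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder DSN

    async def app(scope, receive, send):
        await send({"type": "http.response.start", "status": 200, "headers": []})
        await send({"type": "http.response.body", "body": b"ok"})

    app = SentryAsgiMiddleware(app, transaction_style="url")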
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asyncio.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asyncio.py
new file mode 100644
index 00000000..9326c16e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asyncio.py
@@ -0,0 +1,144 @@
+import sys
+import signal
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.utils import event_from_exception, logger, reraise
+
+try:
+    import asyncio
+    from asyncio.tasks import Task
+except ImportError:
+    raise DidNotEnable("asyncio not available")
+
+from typing import cast, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from collections.abc import Coroutine
+
+    from sentry_sdk._types import ExcInfo
+
+
+def get_name(coro):
+    # type: (Any) -> str
+    return (
+        getattr(coro, "__qualname__", None)
+        or getattr(coro, "__name__", None)
+        or "coroutine without __name__"
+    )
+
+
+def patch_asyncio():
+    # type: () -> None
+    orig_task_factory = None
+    try:
+        loop = asyncio.get_running_loop()
+        orig_task_factory = loop.get_task_factory()
+
+        # Add a shutdown handler to log a helpful message
+        def shutdown_handler():
+            # type: () -> None
+            logger.info(
+                "AsyncIO is shutting down. If you see 'Task was destroyed but it is pending!' "
+                "errors with '_task_with_sentry_span_creation', these are normal during shutdown "
+                "and not a problem with your code or Sentry."
+            )
+
+        try:
+            loop.add_signal_handler(signal.SIGINT, shutdown_handler)
+            loop.add_signal_handler(signal.SIGTERM, shutdown_handler)
+        except (NotImplementedError, AttributeError):
+            # Signal handlers might not be supported on all platforms
+            pass
+
+        def _sentry_task_factory(loop, coro, **kwargs):
+            # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any]
+
+            async def _task_with_sentry_span_creation():
+                # type: () -> Any
+                result = None
+
+                with sentry_sdk.isolation_scope():
+                    with sentry_sdk.start_span(
+                        op=OP.FUNCTION,
+                        name=get_name(coro),
+                        origin=AsyncioIntegration.origin,
+                    ):
+                        try:
+                            result = await coro
+                        except Exception:
+                            reraise(*_capture_exception())
+
+                return result
+
+            task = None
+
+            # Try to use the user-set task factory (if there is one)
+            if orig_task_factory:
+                task = orig_task_factory(
+                    loop, _task_with_sentry_span_creation(), **kwargs
+                )
+
+            if task is None:
+                # The default task factory in `asyncio` does not have its own function
+                # but is just a couple of lines in `asyncio.base_events.create_task()`
+                # Those lines are copied here.
+
+                # WARNING:
+                # If the default behavior of the task creation in asyncio changes,
+                # this will break!
+                task = Task(_task_with_sentry_span_creation(), loop=loop, **kwargs)
+                if task._source_traceback:  # type: ignore
+                    del task._source_traceback[-1]  # type: ignore
+
+            # Set the task name to include the original coroutine's name
+            try:
+                cast("asyncio.Task[Any]", task).set_name(
+                    f"{get_name(coro)} (Sentry-wrapped)"
+                )
+            except AttributeError:
+                # set_name might not be available in all Python versions
+                pass
+
+            return task
+
+        loop.set_task_factory(_sentry_task_factory)  # type: ignore
+
+    except RuntimeError:
+        # When there is no running loop, we have nothing to patch.
+        logger.warning(
+            "There is no running asyncio loop so there is nothing Sentry can patch. "
+            "Please make sure you call sentry_sdk.init() within a running "
+            "asyncio loop for the AsyncioIntegration to work. "
+            "See https://docs.sentry.io/platforms/python/integrations/asyncio/"
+        )
+
+
+def _capture_exception():
+    # type: () -> ExcInfo
+    exc_info = sys.exc_info()
+
+    client = sentry_sdk.get_client()
+
+    integration = client.get_integration(AsyncioIntegration)
+    if integration is not None:
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "asyncio", "handled": False},
+        )
+        sentry_sdk.capture_event(event, hint=hint)
+
+    return exc_info
+
+
+class AsyncioIntegration(Integration):
+    identifier = "asyncio"
+    origin = f"auto.function.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_asyncio()
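A usage sketch with a placeholder DSN; as the `RuntimeError` branch above warns, `sentry_sdk.init` has to run inside an already running event loop so the Sentry task factory can be installed:

    import asyncio

    import sentry_sdk
    from sentry_sdk.integrations.asyncio import AsyncioIntegration

    async def main():
        # init inside the running loop so patch_asyncio() finds it
        sentry_sdk.init(
            dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
            integrations=[AsyncioIntegration()],
            traces_sample_rate=1.0,
        )
        await asyncio.gather(asyncio.sleep(0))  # each task gets its own span

    asyncio.run(main())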
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asyncpg.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asyncpg.py
new file mode 100644
index 00000000..b6b53f46
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/asyncpg.py
@@ -0,0 +1,208 @@
+from __future__ import annotations
+import contextlib
+from typing import Any, TypeVar, Callable, Awaitable, Iterator
+
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
+from sentry_sdk.utils import (
+    ensure_integration_enabled,
+    parse_version,
+    capture_internal_exceptions,
+)
+
+try:
+    import asyncpg  # type: ignore[import-not-found]
+    from asyncpg.cursor import BaseCursor  # type: ignore
+
+except ImportError:
+    raise DidNotEnable("asyncpg not installed.")
+
+
+class AsyncPGIntegration(Integration):
+    identifier = "asyncpg"
+    origin = f"auto.db.{identifier}"
+    _record_params = False
+
+    def __init__(self, *, record_params: bool = False):
+        AsyncPGIntegration._record_params = record_params
+
+    @staticmethod
+    def setup_once() -> None:
+        # asyncpg.__version__ is a string containing the semantic version in the form of "<major>.<minor>.<patch>"
+        asyncpg_version = parse_version(asyncpg.__version__)
+        _check_minimum_version(AsyncPGIntegration, asyncpg_version)
+
+        asyncpg.Connection.execute = _wrap_execute(
+            asyncpg.Connection.execute,
+        )
+
+        asyncpg.Connection._execute = _wrap_connection_method(
+            asyncpg.Connection._execute
+        )
+        asyncpg.Connection._executemany = _wrap_connection_method(
+            asyncpg.Connection._executemany, executemany=True
+        )
+        asyncpg.Connection.cursor = _wrap_cursor_creation(asyncpg.Connection.cursor)
+        asyncpg.Connection.prepare = _wrap_connection_method(asyncpg.Connection.prepare)
+        asyncpg.connect_utils._connect_addr = _wrap_connect_addr(
+            asyncpg.connect_utils._connect_addr
+        )
+
+
+T = TypeVar("T")
+
+
+def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
+            return await f(*args, **kwargs)
+
+        # Avoid recording calls to _execute twice.
+        # Calls to Connection.execute with args also call
+        # Connection._execute, which is recorded separately.
+        # args[0] is the connection object, args[1] is the query.
+        if len(args) > 2:
+            return await f(*args, **kwargs)
+
+        query = args[1]
+        with record_sql_queries(
+            cursor=None,
+            query=query,
+            params_list=None,
+            paramstyle=None,
+            executemany=False,
+            span_origin=AsyncPGIntegration.origin,
+        ) as span:
+            res = await f(*args, **kwargs)
+
+        with capture_internal_exceptions():
+            add_query_source(span)
+
+        return res
+
+    return _inner
+
+
+SubCursor = TypeVar("SubCursor", bound=BaseCursor)
+
+
+@contextlib.contextmanager
+def _record(
+    cursor: SubCursor | None,
+    query: str,
+    params_list: tuple[Any, ...] | None,
+    *,
+    executemany: bool = False,
+) -> Iterator[Span]:
+    integration = sentry_sdk.get_client().get_integration(AsyncPGIntegration)
+    if integration is not None and not integration._record_params:
+        params_list = None
+
+    param_style = "pyformat" if params_list else None
+
+    with record_sql_queries(
+        cursor=cursor,
+        query=query,
+        params_list=params_list,
+        paramstyle=param_style,
+        executemany=executemany,
+        record_cursor_repr=cursor is not None,
+        span_origin=AsyncPGIntegration.origin,
+    ) as span:
+        yield span
+
+
+def _wrap_connection_method(
+    f: Callable[..., Awaitable[T]], *, executemany: bool = False
+) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
+            return await f(*args, **kwargs)
+        query = args[1]
+        params_list = args[2] if len(args) > 2 else None
+        with _record(None, query, params_list, executemany=executemany) as span:
+            _set_db_data(span, args[0])
+            res = await f(*args, **kwargs)
+
+        return res
+
+    return _inner
+
+
+def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]:
+    @ensure_integration_enabled(AsyncPGIntegration, f)
+    def _inner(*args: Any, **kwargs: Any) -> T:  # noqa: N807
+        query = args[1]
+        params_list = args[2] if len(args) > 2 else None
+
+        with _record(
+            None,
+            query,
+            params_list,
+            executemany=False,
+        ) as span:
+            _set_db_data(span, args[0])
+            res = f(*args, **kwargs)
+            span.set_data("db.cursor", res)
+
+        return res
+
+    return _inner
+
+
+def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
+            return await f(*args, **kwargs)
+
+        user = kwargs["params"].user
+        database = kwargs["params"].database
+
+        with sentry_sdk.start_span(
+            op=OP.DB,
+            name="connect",
+            origin=AsyncPGIntegration.origin,
+        ) as span:
+            span.set_data(SPANDATA.DB_SYSTEM, "postgresql")
+            addr = kwargs.get("addr")
+            if addr:
+                try:
+                    span.set_data(SPANDATA.SERVER_ADDRESS, addr[0])
+                    span.set_data(SPANDATA.SERVER_PORT, addr[1])
+                except IndexError:
+                    pass
+            span.set_data(SPANDATA.DB_NAME, database)
+            span.set_data(SPANDATA.DB_USER, user)
+
+            with capture_internal_exceptions():
+                sentry_sdk.add_breadcrumb(
+                    message="connect", category="query", data=span._data
+                )
+            res = await f(*args, **kwargs)
+
+        return res
+
+    return _inner
+
+
+def _set_db_data(span: Span, conn: Any) -> None:
+    span.set_data(SPANDATA.DB_SYSTEM, "postgresql")
+
+    addr = conn._addr
+    if addr:
+        try:
+            span.set_data(SPANDATA.SERVER_ADDRESS, addr[0])
+            span.set_data(SPANDATA.SERVER_PORT, addr[1])
+        except IndexError:
+            pass
+
+    database = conn._params.database
+    if database:
+        span.set_data(SPANDATA.DB_NAME, database)
+
+    user = conn._params.user
+    if user:
+        span.set_data(SPANDATA.DB_USER, user)
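A configuration sketch with a placeholder DSN; `record_params` defaults to `False` in the constructor above, so query parameters are only recorded when explicitly opted into:

    import sentry_sdk
    from sentry_sdk.integrations.asyncpg import AsyncPGIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[AsyncPGIntegration(record_params=True)],
        traces_sample_rate=1.0,  # the record_sql_queries() spans require tracing
    )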
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/atexit.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/atexit.py
new file mode 100644
index 00000000..dfc6d08e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/atexit.py
@@ -0,0 +1,57 @@
+import os
+import sys
+import atexit
+
+import sentry_sdk
+from sentry_sdk.utils import logger
+from sentry_sdk.integrations import Integration
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Optional
+
+
+def default_callback(pending, timeout):
+    # type: (int, int) -> None
+    """This is the default shutdown callback that is set on the options.
+    It prints out a message to stderr that informs the user that some events
+    are still pending and the process is waiting for them to flush out.
+    """
+
+    def echo(msg):
+        # type: (str) -> None
+        sys.stderr.write(msg + "\n")
+
+    echo("Sentry is attempting to send %i pending events" % pending)
+    echo("Waiting up to %s seconds" % timeout)
+    echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C"))
+    sys.stderr.flush()
+
+
+class AtexitIntegration(Integration):
+    identifier = "atexit"
+
+    def __init__(self, callback=None):
+        # type: (Optional[Any]) -> None
+        if callback is None:
+            callback = default_callback
+        self.callback = callback
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @atexit.register
+        def _shutdown():
+            # type: () -> None
+            client = sentry_sdk.get_client()
+            integration = client.get_integration(AtexitIntegration)
+
+            if integration is None:
+                return
+
+            logger.debug("atexit: got shutdown signal")
+            logger.debug("atexit: shutting down client")
+            sentry_sdk.get_isolation_scope().end_session()
+
+            client.close(callback=integration.callback)
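A sketch of swapping in a custom shutdown callback, with a placeholder DSN; the callback replaces `default_callback` above while the client is still flushed and closed at exit:

    import sentry_sdk
    from sentry_sdk.integrations.atexit import AtexitIntegration

    def silent_callback(pending, timeout):
        # same signature as default_callback, but prints nothing to stderr
        pass

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[AtexitIntegration(callback=silent_callback)],
    )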
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/aws_lambda.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/aws_lambda.py
new file mode 100644
index 00000000..4990fd6e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/aws_lambda.py
@@ -0,0 +1,499 @@
+import functools
+import json
+import re
+import sys
+from copy import deepcopy
+from datetime import datetime, timedelta, timezone
+from os import environ
+
+import sentry_sdk
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import TransactionSource
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    logger,
+    TimeoutThread,
+    reraise,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import TypeVar
+    from typing import Callable
+    from typing import Optional
+
+    from sentry_sdk._types import EventProcessor, Event, Hint
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+# Constants
+TIMEOUT_WARNING_BUFFER = 1500  # Buffer time (in milliseconds) required to send the timeout warning to Sentry
+MILLIS_TO_SECONDS = 1000.0
+
+
+def _wrap_init_error(init_error):
+    # type: (F) -> F
+    @ensure_integration_enabled(AwsLambdaIntegration, init_error)
+    def sentry_init_error(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        client = sentry_sdk.get_client()
+
+        with capture_internal_exceptions():
+            sentry_sdk.get_isolation_scope().clear_breadcrumbs()
+
+            exc_info = sys.exc_info()
+            if exc_info and all(exc_info):
+                sentry_event, hint = event_from_exception(
+                    exc_info,
+                    client_options=client.options,
+                    mechanism={"type": "aws_lambda", "handled": False},
+                )
+                sentry_sdk.capture_event(sentry_event, hint=hint)
+
+            else:
+                # Fall back to AWS Lambda's JSON representation of the error
+                error_info = args[1]
+                if isinstance(error_info, str):
+                    error_info = json.loads(error_info)
+                sentry_event = _event_from_error_json(error_info)
+                sentry_sdk.capture_event(sentry_event)
+
+        return init_error(*args, **kwargs)
+
+    return sentry_init_error  # type: ignore
+
+
+def _wrap_handler(handler):
+    # type: (F) -> F
+    @functools.wraps(handler)
+    def sentry_handler(aws_event, aws_context, *args, **kwargs):
+        # type: (Any, Any, *Any, **Any) -> Any
+
+        # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html,
+        # `event` here is *likely* a dictionary, but also might be a number of
+        # other types (str, int, float, None).
+        #
+        # In some cases, it is a list (if the user is batch-invoking their
+        # function, for example), in which case we'll use the first entry as a
+        # representative from which to try pulling request data. (Presumably it
+        # will be the same for all events in the list, since they're all hitting
+        # the lambda in the same request.)
+
+        client = sentry_sdk.get_client()
+        integration = client.get_integration(AwsLambdaIntegration)
+
+        if integration is None:
+            return handler(aws_event, aws_context, *args, **kwargs)
+
+        if isinstance(aws_event, list) and len(aws_event) >= 1:
+            request_data = aws_event[0]
+            batch_size = len(aws_event)
+        else:
+            request_data = aws_event
+            batch_size = 1
+
+        if not isinstance(request_data, dict):
+            # If we're not dealing with a dictionary, we won't be able to get
+            # headers, path, http method, etc in any case, so it's fine that
+            # this is empty
+            request_data = {}
+
+        configured_time = aws_context.get_remaining_time_in_millis()
+
+        with sentry_sdk.isolation_scope() as scope:
+            timeout_thread = None
+            with capture_internal_exceptions():
+                scope.clear_breadcrumbs()
+                scope.add_event_processor(
+                    _make_request_event_processor(
+                        request_data, aws_context, configured_time
+                    )
+                )
+                scope.set_tag(
+                    "aws_region", aws_context.invoked_function_arn.split(":")[3]
+                )
+                if batch_size > 1:
+                    scope.set_tag("batch_request", True)
+                    scope.set_tag("batch_size", batch_size)
+
+                # Start the timeout thread only if the configured time is greater
+                # than the timeout warning buffer and timeout_warning is set to True.
+                if (
+                    integration.timeout_warning
+                    and configured_time > TIMEOUT_WARNING_BUFFER
+                ):
+                    waiting_time = (
+                        configured_time - TIMEOUT_WARNING_BUFFER
+                    ) / MILLIS_TO_SECONDS
+
+                    timeout_thread = TimeoutThread(
+                        waiting_time,
+                        configured_time / MILLIS_TO_SECONDS,
+                    )
+
+                    # Start the thread that raises the timeout warning exception
+                    timeout_thread.start()
+
+            headers = request_data.get("headers", {})
+            # Some AWS services (e.g. EventBridge) set headers as a list
+            # or None, so we must ensure it is a dict
+            if not isinstance(headers, dict):
+                headers = {}
+
+            transaction = continue_trace(
+                headers,
+                op=OP.FUNCTION_AWS,
+                name=aws_context.function_name,
+                source=TransactionSource.COMPONENT,
+                origin=AwsLambdaIntegration.origin,
+            )
+            with sentry_sdk.start_transaction(
+                transaction,
+                custom_sampling_context={
+                    "aws_event": aws_event,
+                    "aws_context": aws_context,
+                },
+            ):
+                try:
+                    return handler(aws_event, aws_context, *args, **kwargs)
+                except Exception:
+                    exc_info = sys.exc_info()
+                    sentry_event, hint = event_from_exception(
+                        exc_info,
+                        client_options=client.options,
+                        mechanism={"type": "aws_lambda", "handled": False},
+                    )
+                    sentry_sdk.capture_event(sentry_event, hint=hint)
+                    reraise(*exc_info)
+                finally:
+                    if timeout_thread:
+                        timeout_thread.stop()
+
+    return sentry_handler  # type: ignore
+
+
+def _drain_queue():
+    # type: () -> None
+    with capture_internal_exceptions():
+        client = sentry_sdk.get_client()
+        integration = client.get_integration(AwsLambdaIntegration)
+        if integration is not None:
+            # Flush out the event queue before AWS kills the
+            # process.
+            client.flush()
+
+
+class AwsLambdaIntegration(Integration):
+    identifier = "aws_lambda"
+    origin = f"auto.function.{identifier}"
+
+    def __init__(self, timeout_warning=False):
+        # type: (bool) -> None
+        self.timeout_warning = timeout_warning
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        lambda_bootstrap = get_lambda_bootstrap()
+        if not lambda_bootstrap:
+            logger.warning(
+                "Not running in AWS Lambda environment, "
+                "AwsLambdaIntegration disabled (could not find bootstrap module)"
+            )
+            return
+
+        if not hasattr(lambda_bootstrap, "handle_event_request"):
+            logger.warning(
+                "Not running in AWS Lambda environment, "
+                "AwsLambdaIntegration disabled (could not find handle_event_request)"
+            )
+            return
+
+        pre_37 = hasattr(lambda_bootstrap, "handle_http_request")  # Python 3.6
+
+        if pre_37:
+            old_handle_event_request = lambda_bootstrap.handle_event_request
+
+            def sentry_handle_event_request(request_handler, *args, **kwargs):
+                # type: (Any, *Any, **Any) -> Any
+                request_handler = _wrap_handler(request_handler)
+                return old_handle_event_request(request_handler, *args, **kwargs)
+
+            lambda_bootstrap.handle_event_request = sentry_handle_event_request
+
+            old_handle_http_request = lambda_bootstrap.handle_http_request
+
+            def sentry_handle_http_request(request_handler, *args, **kwargs):
+                # type: (Any, *Any, **Any) -> Any
+                request_handler = _wrap_handler(request_handler)
+                return old_handle_http_request(request_handler, *args, **kwargs)
+
+            lambda_bootstrap.handle_http_request = sentry_handle_http_request
+
+            # Patch to_json to drain the queue. This should work even when the
+            # SDK is initialized inside of the handler
+
+            old_to_json = lambda_bootstrap.to_json
+
+            def sentry_to_json(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                _drain_queue()
+                return old_to_json(*args, **kwargs)
+
+            lambda_bootstrap.to_json = sentry_to_json
+        else:
+            lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error(
+                lambda_bootstrap.LambdaRuntimeClient.post_init_error
+            )
+
+            old_handle_event_request = lambda_bootstrap.handle_event_request
+
+            def sentry_handle_event_request(  # type: ignore
+                lambda_runtime_client, request_handler, *args, **kwargs
+            ):
+                request_handler = _wrap_handler(request_handler)
+                return old_handle_event_request(
+                    lambda_runtime_client, request_handler, *args, **kwargs
+                )
+
+            lambda_bootstrap.handle_event_request = sentry_handle_event_request
+
+            # Patch the runtime client to drain the queue. This should work
+            # even when the SDK is initialized inside of the handler
+
+            def _wrap_post_function(f):
+                # type: (F) -> F
+                def inner(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    _drain_queue()
+                    return f(*args, **kwargs)
+
+                return inner  # type: ignore
+
+            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = (
+                _wrap_post_function(
+                    lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
+                )
+            )
+            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = (
+                _wrap_post_function(
+                    lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
+                )
+            )
+
+
+def get_lambda_bootstrap():
+    # type: () -> Optional[Any]
+
+    # Python 3.7: If the bootstrap module is *already imported*, it is the
+    # one we actually want to use (no idea what's in __main__)
+    #
+    # Python 3.8: bootstrap is also importable, but will be the same file
+    # as __main__ imported under a different name:
+    #
+    #     sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__
+    #     sys.modules['__main__'] is not sys.modules['bootstrap']
+    #
+    # Python 3.9: bootstrap is in __main__.awslambdaricmain
+    #
+    # On container builds using the `aws-lambda-python-runtime-interface-client`
+    # (awslambdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap
+    #
+    # Such a setup would then make all monkeypatches useless.
+    if "bootstrap" in sys.modules:
+        return sys.modules["bootstrap"]
+    elif "__main__" in sys.modules:
+        module = sys.modules["__main__"]
+        # python3.9 runtime
+        if hasattr(module, "awslambdaricmain") and hasattr(
+            module.awslambdaricmain, "bootstrap"
+        ):
+            return module.awslambdaricmain.bootstrap
+        elif hasattr(module, "bootstrap"):
+            # awslambdaric python module in container builds
+            return module.bootstrap
+
+        # python3.8 runtime
+        return module
+    else:
+        return None
+
+
+def _make_request_event_processor(aws_event, aws_context, configured_timeout):
+    # type: (Any, Any, Any) -> EventProcessor
+    start_time = datetime.now(timezone.utc)
+
+    def event_processor(sentry_event, hint, start_time=start_time):
+        # type: (Event, Hint, datetime) -> Optional[Event]
+        remaining_time_in_millis = aws_context.get_remaining_time_in_millis()
+        exec_duration = configured_timeout - remaining_time_in_millis
+
+        extra = sentry_event.setdefault("extra", {})
+        extra["lambda"] = {
+            "function_name": aws_context.function_name,
+            "function_version": aws_context.function_version,
+            "invoked_function_arn": aws_context.invoked_function_arn,
+            "aws_request_id": aws_context.aws_request_id,
+            "execution_duration_in_millis": exec_duration,
+            "remaining_time_in_millis": remaining_time_in_milis,
+        }
+
+        extra["cloudwatch logs"] = {
+            "url": _get_cloudwatch_logs_url(aws_context, start_time),
+            "log_group": aws_context.log_group_name,
+            "log_stream": aws_context.log_stream_name,
+        }
+
+        request = sentry_event.get("request", {})
+
+        if "httpMethod" in aws_event:
+            request["method"] = aws_event["httpMethod"]
+
+        request["url"] = _get_url(aws_event, aws_context)
+
+        if "queryStringParameters" in aws_event:
+            request["query_string"] = aws_event["queryStringParameters"]
+
+        if "headers" in aws_event:
+            request["headers"] = _filter_headers(aws_event["headers"])
+
+        if should_send_default_pii():
+            user_info = sentry_event.setdefault("user", {})
+
+            identity = aws_event.get("identity")
+            if identity is None:
+                identity = {}
+
+            id = identity.get("userArn")
+            if id is not None:
+                user_info.setdefault("id", id)
+
+            ip = identity.get("sourceIp")
+            if ip is not None:
+                user_info.setdefault("ip_address", ip)
+
+            if "body" in aws_event:
+                request["data"] = aws_event.get("body", "")
+        else:
+            if aws_event.get("body", None):
+                # Unfortunately there is no way to get a structured body from the
+                # AWS event, so every body is unstructured to us.
+                request["data"] = AnnotatedValue.removed_because_raw_data()
+
+        sentry_event["request"] = deepcopy(request)
+
+        return sentry_event
+
+    return event_processor
+
+
+def _get_url(aws_event, aws_context):
+    # type: (Any, Any) -> str
+    path = aws_event.get("path", None)
+
+    headers = aws_event.get("headers")
+    if headers is None:
+        headers = {}
+
+    host = headers.get("Host", None)
+    proto = headers.get("X-Forwarded-Proto", None)
+    if proto and host and path:
+        return "{}://{}{}".format(proto, host, path)
+    return "awslambda:///{}".format(aws_context.function_name)
+
+
+def _get_cloudwatch_logs_url(aws_context, start_time):
+    # type: (Any, datetime) -> str
+    """
+    Generates a CloudWatchLogs console URL based on the context object
+
+    Arguments:
+        aws_context {Any} -- context from lambda handler
+
+    Returns:
+        str -- AWS Console URL to logs.
+    """
+    formatstring = "%Y-%m-%dT%H:%M:%SZ"
+    region = environ.get("AWS_REGION", "")
+
+    url = (
+        "https://console.{domain}/cloudwatch/home?region={region}"
+        "#logEventViewer:group={log_group};stream={log_stream}"
+        ";start={start_time};end={end_time}"
+    ).format(
+        domain="amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com",
+        region=region,
+        log_group=aws_context.log_group_name,
+        log_stream=aws_context.log_stream_name,
+        start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
+        end_time=(datetime.now(timezone.utc) + timedelta(seconds=2)).strftime(
+            formatstring
+        ),
+    )
+
+    return url
+
+
+def _parse_formatted_traceback(formatted_tb):
+    # type: (list[str]) -> list[dict[str, Any]]
+    frames = []
+    for frame in formatted_tb:
+        match = re.match(r'File "(.+)", line (\d+), in (.+)', frame.strip())
+        if match:
+            file_name, line_number, func_name = match.groups()
+            line_number = int(line_number)
+            frames.append(
+                {
+                    "filename": file_name,
+                    "function": func_name,
+                    "lineno": line_number,
+                    "vars": None,
+                    "pre_context": None,
+                    "context_line": None,
+                    "post_context": None,
+                }
+            )
+    return frames
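+# Example (illustrative): a formatted traceback entry such as
+#   '  File "/var/task/handler.py", line 12, in handler\n    ...'
+# is parsed into a frame dict like
+#   {"filename": "/var/task/handler.py", "function": "handler", "lineno": 12, ...}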
+
+
+def _event_from_error_json(error_json):
+    # type: (dict[str, Any]) -> Event
+    """
+    Converts the error JSON from AWS Lambda into a Sentry error event.
+    This is not a full-fledged event, but it is better than nothing.
+
+    This is an example of where AWS creates the error JSON:
+    https://github.com/aws/aws-lambda-python-runtime-interface-client/blob/2.2.1/awslambdaric/bootstrap.py#L479
+    """
+    event = {
+        "level": "error",
+        "exception": {
+            "values": [
+                {
+                    "type": error_json.get("errorType"),
+                    "value": error_json.get("errorMessage"),
+                    "stacktrace": {
+                        "frames": _parse_formatted_traceback(
+                            error_json.get("stackTrace", [])
+                        ),
+                    },
+                    "mechanism": {
+                        "type": "aws_lambda",
+                        "handled": False,
+                    },
+                }
+            ],
+        },
+    }  # type: Event
+
+    return event
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/beam.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/beam.py
new file mode 100644
index 00000000..a2e4553f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/beam.py
@@ -0,0 +1,176 @@
+import sys
+import types
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    reraise,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Iterator
+    from typing import TypeVar
+    from typing import Callable
+
+    from sentry_sdk._types import ExcInfo
+
+    T = TypeVar("T")
+    F = TypeVar("F", bound=Callable[..., Any])
+
+
+WRAPPED_FUNC = "_wrapped_{}_"
+INSPECT_FUNC = "_inspect_{}"  # Required format per apache_beam/transforms/core.py
+USED_FUNC = "_sentry_used_"
+
+
+class BeamIntegration(Integration):
+    identifier = "beam"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        from apache_beam.transforms.core import DoFn, ParDo  # type: ignore
+
+        ignore_logger("root")
+        ignore_logger("bundle_processor.create")
+
+        function_patches = ["process", "start_bundle", "finish_bundle", "setup"]
+        for func_name in function_patches:
+            setattr(
+                DoFn,
+                INSPECT_FUNC.format(func_name),
+                _wrap_inspect_call(DoFn, func_name),
+            )
+
+        old_init = ParDo.__init__
+
+        def sentry_init_pardo(self, fn, *args, **kwargs):
+            # type: (ParDo, Any, *Any, **Any) -> Any
+            # Do not monkey patch init twice
+            if not getattr(self, "_sentry_is_patched", False):
+                for func_name in function_patches:
+                    if not hasattr(fn, func_name):
+                        continue
+                    wrapped_func = WRAPPED_FUNC.format(func_name)
+
+                    # Only wrap if neither the inspect function nor the
+                    # process function has been patched yet, to avoid
+                    # monkey patching `process` twice. (The hasattr check
+                    # above exists for backwards compatibility with objects
+                    # that do not define every function.)
+                    process_func = getattr(fn, func_name)
+                    inspect_func = getattr(fn, INSPECT_FUNC.format(func_name))
+                    if not getattr(inspect_func, USED_FUNC, False) and not getattr(
+                        process_func, USED_FUNC, False
+                    ):
+                        setattr(fn, wrapped_func, process_func)
+                        setattr(fn, func_name, _wrap_task_call(process_func))
+
+                self._sentry_is_patched = True
+            old_init(self, fn, *args, **kwargs)
+
+        ParDo.__init__ = sentry_init_pardo
+
+
+def _wrap_inspect_call(cls, func_name):
+    # type: (Any, Any) -> Any
+
+    if not hasattr(cls, func_name):
+        return None
+
+    def _inspect(self):
+        # type: (Any) -> Any
+        """
+        Inspect function overrides the way Beam gets argspec.
+        """
+        wrapped_func = WRAPPED_FUNC.format(func_name)
+        if hasattr(self, wrapped_func):
+            process_func = getattr(self, wrapped_func)
+        else:
+            process_func = getattr(self, func_name)
+            setattr(self, func_name, _wrap_task_call(process_func))
+            setattr(self, wrapped_func, process_func)
+
+        # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults
+        # (which uses Signatures internally) should be used instead.
+        try:
+            from apache_beam.transforms.core import get_function_args_defaults
+
+            return get_function_args_defaults(process_func)
+        except ImportError:
+            from apache_beam.typehints.decorators import getfullargspec  # type: ignore
+
+            return getfullargspec(process_func)
+
+    setattr(_inspect, USED_FUNC, True)
+    return _inspect
+
+
+def _wrap_task_call(func):
+    # type: (F) -> F
+    """
+    Wrap task call with a try catch to get exceptions.
+    """
+
+    @wraps(func)
+    def _inner(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        try:
+            gen = func(*args, **kwargs)
+        except Exception:
+            raise_exception()
+
+        if not isinstance(gen, types.GeneratorType):
+            return gen
+        return _wrap_generator_call(gen)
+
+    setattr(_inner, USED_FUNC, True)
+    return _inner  # type: ignore
+
+
+@ensure_integration_enabled(BeamIntegration)
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    """
+    Send Beam exception to Sentry.
+    """
+    client = sentry_sdk.get_client()
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=client.options,
+        mechanism={"type": "beam", "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def raise_exception():
+    # type: () -> None
+    """
+    Raise an exception.
+    """
+    exc_info = sys.exc_info()
+    with capture_internal_exceptions():
+        _capture_exception(exc_info)
+    reraise(*exc_info)
+
+
+def _wrap_generator_call(gen):
+    # type: (Iterator[T]) -> Iterator[T]
+    """
+    Wrap the generator to handle any failures.
+    """
+    while True:
+        try:
+            yield next(gen)
+        except StopIteration:
+            break
+        except Exception:
+            raise_exception()
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/boto3.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/boto3.py
new file mode 100644
index 00000000..0207341f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/boto3.py
@@ -0,0 +1,137 @@
+from functools import partial
+
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    parse_url,
+    parse_version,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+    from typing import Type
+
+try:
+    from botocore import __version__ as BOTOCORE_VERSION  # type: ignore
+    from botocore.client import BaseClient  # type: ignore
+    from botocore.response import StreamingBody  # type: ignore
+    from botocore.awsrequest import AWSRequest  # type: ignore
+except ImportError:
+    raise DidNotEnable("botocore is not installed")
+
+
+class Boto3Integration(Integration):
+    identifier = "boto3"
+    origin = f"auto.http.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = parse_version(BOTOCORE_VERSION)
+        _check_minimum_version(Boto3Integration, version, "botocore")
+
+        orig_init = BaseClient.__init__
+
+        def sentry_patched_init(self, *args, **kwargs):
+            # type: (Type[BaseClient], *Any, **Any) -> None
+            orig_init(self, *args, **kwargs)
+            meta = self.meta
+            service_id = meta.service_model.service_id.hyphenize()
+            meta.events.register(
+                "request-created",
+                partial(_sentry_request_created, service_id=service_id),
+            )
+            meta.events.register("after-call", _sentry_after_call)
+            meta.events.register("after-call-error", _sentry_after_call_error)
+
+        BaseClient.__init__ = sentry_patched_init
+
+
+@ensure_integration_enabled(Boto3Integration)
+def _sentry_request_created(service_id, request, operation_name, **kwargs):
+    # type: (str, AWSRequest, str, **Any) -> None
+    description = "aws.%s.%s" % (service_id, operation_name)
+    span = sentry_sdk.start_span(
+        op=OP.HTTP_CLIENT,
+        name=description,
+        origin=Boto3Integration.origin,
+    )
+
+    with capture_internal_exceptions():
+        parsed_url = parse_url(request.url, sanitize=False)
+        span.set_data("aws.request.url", parsed_url.url)
+        span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+        span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+    span.set_tag("aws.service_id", service_id)
+    span.set_tag("aws.operation_name", operation_name)
+    span.set_data(SPANDATA.HTTP_METHOD, request.method)
+
+    # We do this so that subsequent HTTP calls/retries are
+    # attached to this span.
+    span.__enter__()
+
+    # request.context is an open-ended data structure to which
+    # we can add anything useful during the request life cycle.
+    request.context["_sentrysdk_span"] = span
+
+
+def _sentry_after_call(context, parsed, **kwargs):
+    # type: (Dict[str, Any], Dict[str, Any], **Any) -> None
+    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]
+
+    # Span could be absent if the integration is disabled.
+    if span is None:
+        return
+    span.__exit__(None, None, None)
+
+    body = parsed.get("Body")
+    if not isinstance(body, StreamingBody):
+        return
+
+    streaming_span = span.start_child(
+        op=OP.HTTP_CLIENT_STREAM,
+        name=span.description,
+        origin=Boto3Integration.origin,
+    )
+
+    orig_read = body.read
+    orig_close = body.close
+
+    def sentry_streaming_body_read(*args, **kwargs):
+        # type: (*Any, **Any) -> bytes
+        try:
+            ret = orig_read(*args, **kwargs)
+            if not ret:
+                streaming_span.finish()
+            return ret
+        except Exception:
+            streaming_span.finish()
+            raise
+
+    body.read = sentry_streaming_body_read
+
+    def sentry_streaming_body_close(*args, **kwargs):
+        # type: (*Any, **Any) -> None
+        streaming_span.finish()
+        orig_close(*args, **kwargs)
+
+    body.close = sentry_streaming_body_close
+
+
+def _sentry_after_call_error(context, exception, **kwargs):
+    # type: (Dict[str, Any], Type[BaseException], **Any) -> None
+    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]
+
+    # Span could be absent if the integration is disabled.
+    if span is None:
+        return
+    span.__exit__(type(exception), exception, None)
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/bottle.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/bottle.py
new file mode 100644
index 00000000..8a9fc412
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/bottle.py
@@ -0,0 +1,221 @@
+import functools
+
+import sentry_sdk
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    parse_version,
+    transaction_from_function,
+)
+from sentry_sdk.integrations import (
+    Integration,
+    DidNotEnable,
+    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
+    _check_minimum_version,
+)
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Set
+
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from typing import Any
+    from typing import Dict
+    from typing import Callable
+    from typing import Optional
+    from bottle import FileUpload, FormsDict, LocalRequest  # type: ignore
+
+    from sentry_sdk._types import EventProcessor, Event
+
+try:
+    from bottle import (
+        Bottle,
+        HTTPResponse,
+        Route,
+        request as bottle_request,
+        __version__ as BOTTLE_VERSION,
+    )
+except ImportError:
+    raise DidNotEnable("Bottle not installed")
+
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class BottleIntegration(Integration):
+    identifier = "bottle"
+    origin = f"auto.http.{identifier}"
+
+    transaction_style = ""
+
+    def __init__(
+        self,
+        transaction_style="endpoint",  # type: str
+        *,
+        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
+    ):
+        # type: (...) -> None
+
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+        self.failed_request_status_codes = failed_request_status_codes
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = parse_version(BOTTLE_VERSION)
+        _check_minimum_version(BottleIntegration, version)
+
+        old_app = Bottle.__call__
+
+        @ensure_integration_enabled(BottleIntegration, old_app)
+        def sentry_patched_wsgi_app(self, environ, start_response):
+            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+            middleware = SentryWsgiMiddleware(
+                lambda *a, **kw: old_app(self, *a, **kw),
+                span_origin=BottleIntegration.origin,
+            )
+
+            return middleware(environ, start_response)
+
+        Bottle.__call__ = sentry_patched_wsgi_app
+
+        old_handle = Bottle._handle
+
+        @functools.wraps(old_handle)
+        def _patched_handle(self, environ):
+            # type: (Bottle, Dict[str, Any]) -> Any
+            integration = sentry_sdk.get_client().get_integration(BottleIntegration)
+            if integration is None:
+                return old_handle(self, environ)
+
+            scope = sentry_sdk.get_isolation_scope()
+            scope._name = "bottle"
+            scope.add_event_processor(
+                _make_request_event_processor(self, bottle_request, integration)
+            )
+            res = old_handle(self, environ)
+
+            return res
+
+        Bottle._handle = _patched_handle
+
+        old_make_callback = Route._make_callback
+
+        @functools.wraps(old_make_callback)
+        def patched_make_callback(self, *args, **kwargs):
+            # type: (Route, *object, **object) -> Any
+            prepared_callback = old_make_callback(self, *args, **kwargs)
+
+            integration = sentry_sdk.get_client().get_integration(BottleIntegration)
+            if integration is None:
+                return prepared_callback
+
+            def wrapped_callback(*args, **kwargs):
+                # type: (*object, **object) -> Any
+                try:
+                    res = prepared_callback(*args, **kwargs)
+                except Exception as exception:
+                    _capture_exception(exception, handled=False)
+                    raise exception
+
+                if (
+                    isinstance(res, HTTPResponse)
+                    and res.status_code in integration.failed_request_status_codes
+                ):
+                    _capture_exception(res, handled=True)
+
+                return res
+
+            return wrapped_callback
+
+        Route._make_callback = patched_make_callback
+
+
+class BottleRequestExtractor(RequestExtractor):
+    def env(self):
+        # type: () -> Dict[str, str]
+        return self.request.environ
+
+    def cookies(self):
+        # type: () -> Dict[str, str]
+        return self.request.cookies
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body.read()
+
+    def form(self):
+        # type: () -> FormsDict
+        if self.is_json():
+            return None
+        return self.request.forms.decode()
+
+    def files(self):
+        # type: () -> Optional[Dict[str, str]]
+        if self.is_json():
+            return None
+
+        return self.request.files
+
+    def size_of_file(self, file):
+        # type: (FileUpload) -> int
+        return file.content_length
+
+
+def _set_transaction_name_and_source(event, transaction_style, request):
+    # type: (Event, str, Any) -> None
+    name = ""
+
+    if transaction_style == "url":
+        try:
+            name = request.route.rule or ""
+        except RuntimeError:
+            pass
+
+    elif transaction_style == "endpoint":
+        try:
+            name = (
+                request.route.name
+                or transaction_from_function(request.route.callback)
+                or ""
+            )
+        except RuntimeError:
+            pass
+
+    event["transaction"] = name
+    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+
+
+def _make_request_event_processor(app, request, integration):
+    # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor
+
+    def event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        _set_transaction_name_and_source(event, integration.transaction_style, request)
+
+        with capture_internal_exceptions():
+            BottleRequestExtractor(request).extract_into_event(event)
+
+        return event
+
+    return event_processor
+
+
+def _capture_exception(exception, handled):
+    # type: (BaseException, bool) -> None
+    event, hint = event_from_exception(
+        exception,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "bottle", "handled": handled},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/__init__.py
new file mode 100644
index 00000000..e8811d76
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/__init__.py
@@ -0,0 +1,528 @@
+import sys
+from collections.abc import Mapping
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk import isolation_scope
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.integrations.celery.beat import (
+    _patch_beat_apply_entry,
+    _patch_redbeat_maybe_due,
+    _setup_celery_beat_signals,
+)
+from sentry_sdk.integrations.celery.utils import _now_seconds_since_epoch
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TransactionSource
+from sentry_sdk.tracing_utils import Baggage
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    reraise,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+    from typing import List
+    from typing import Optional
+    from typing import TypeVar
+    from typing import Union
+
+    from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
+    from sentry_sdk.tracing import Span
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+
+try:
+    from celery import VERSION as CELERY_VERSION  # type: ignore
+    from celery.app.task import Task  # type: ignore
+    from celery.app.trace import task_has_custom
+    from celery.exceptions import (  # type: ignore
+        Ignore,
+        Reject,
+        Retry,
+        SoftTimeLimitExceeded,
+    )
+    from kombu import Producer  # type: ignore
+except ImportError:
+    raise DidNotEnable("Celery not installed")
+
+
+CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
+
+
+class CeleryIntegration(Integration):
+    identifier = "celery"
+    origin = f"auto.queue.{identifier}"
+
+    def __init__(
+        self,
+        propagate_traces=True,
+        monitor_beat_tasks=False,
+        exclude_beat_tasks=None,
+    ):
+        # type: (bool, bool, Optional[List[str]]) -> None
+        self.propagate_traces = propagate_traces
+        self.monitor_beat_tasks = monitor_beat_tasks
+        self.exclude_beat_tasks = exclude_beat_tasks
+
+        _patch_beat_apply_entry()
+        _patch_redbeat_maybe_due()
+        _setup_celery_beat_signals(monitor_beat_tasks)
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _check_minimum_version(CeleryIntegration, CELERY_VERSION)
+
+        _patch_build_tracer()
+        _patch_task_apply_async()
+        _patch_celery_send_task()
+        _patch_worker_exit()
+        _patch_producer_publish()
+
+        # This logger logs every status of every task that ran on the worker,
+        # meaning every task's breadcrumbs would be full of entries like
+        # "Task <foo> raised unexpected <bar>".
+        ignore_logger("celery.worker.job")
+        ignore_logger("celery.app.trace")
+
+        # This logger is stdout/stderr redirected to a logger; we can't deal
+        # with it (needs event_level=logging.WARN to reproduce).
+        ignore_logger("celery.redirected")
+
+
+def _set_status(status):
+    # type: (str) -> None
+    with capture_internal_exceptions():
+        scope = sentry_sdk.get_current_scope()
+        if scope.span is not None:
+            scope.span.set_status(status)
+
+
+def _capture_exception(task, exc_info):
+    # type: (Any, ExcInfo) -> None
+    client = sentry_sdk.get_client()
+    if client.get_integration(CeleryIntegration) is None:
+        return
+
+    if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
+        # ??? Doesn't map to anything
+        _set_status("aborted")
+        return
+
+    _set_status("internal_error")
+
+    if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
+        return
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=client.options,
+        mechanism={"type": "celery", "handled": False},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _make_event_processor(task, uuid, args, kwargs, request=None):
+    # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        with capture_internal_exceptions():
+            tags = event.setdefault("tags", {})
+            tags["celery_task_id"] = uuid
+            extra = event.setdefault("extra", {})
+            extra["celery-job"] = {
+                "task_name": task.name,
+                "args": args,
+                "kwargs": kwargs,
+            }
+
+        if "exc_info" in hint:
+            with capture_internal_exceptions():
+                if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
+                    event["fingerprint"] = [
+                        "celery",
+                        "SoftTimeLimitExceeded",
+                        getattr(task, "name", task),
+                    ]
+
+        return event
+
+    return event_processor
+
+
+def _update_celery_task_headers(original_headers, span, monitor_beat_tasks):
+    # type: (dict[str, Any], Optional[Span], bool) -> dict[str, Any]
+    """
+    Updates the headers of the Celery task with the tracing information
+    and eventually Sentry Crons monitoring information for beat tasks.
+    """
+    updated_headers = original_headers.copy()
+    with capture_internal_exceptions():
+        # if span is None (when the task was started by Celery Beat)
+        # this will return the trace headers from the scope.
+        headers = dict(
+            sentry_sdk.get_isolation_scope().iter_trace_propagation_headers(span=span)
+        )
+
+        if monitor_beat_tasks:
+            headers.update(
+                {
+                    "sentry-monitor-start-timestamp-s": "%.9f"
+                    % _now_seconds_since_epoch(),
+                }
+            )
+
+        # Add the time the task was enqueued to the headers
+        # This is used in the consumer to calculate the latency
+        updated_headers.update(
+            {"sentry-task-enqueued-time": _now_seconds_since_epoch()}
+        )
+
+        if headers:
+            existing_baggage = updated_headers.get(BAGGAGE_HEADER_NAME)
+            sentry_baggage = headers.get(BAGGAGE_HEADER_NAME)
+
+            combined_baggage = sentry_baggage or existing_baggage
+            if sentry_baggage and existing_baggage:
+                # Merge incoming and sentry baggage, where the sentry trace information
+                # in the incoming baggage takes precedence and the third-party items
+                # are concatenated.
+                incoming = Baggage.from_incoming_header(existing_baggage)
+                combined = Baggage.from_incoming_header(sentry_baggage)
+                combined.sentry_items.update(incoming.sentry_items)
+                combined.third_party_items = ",".join(
+                    [
+                        x
+                        for x in [
+                            combined.third_party_items,
+                            incoming.third_party_items,
+                        ]
+                        if x is not None and x != ""
+                    ]
+                )
+                combined_baggage = combined.serialize(include_third_party=True)
+
+            updated_headers.update(headers)
+            if combined_baggage:
+                updated_headers[BAGGAGE_HEADER_NAME] = combined_baggage
+
+            # https://github.com/celery/celery/issues/4875
+            #
+            # Need to setdefault the inner headers too since other
+            # tracing tools (dd-trace-py) also employ this exact
+            # workaround and we don't want to break them.
+            updated_headers.setdefault("headers", {}).update(headers)
+            if combined_baggage:
+                updated_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage
+
+            # Add the Sentry options potentially added in `sentry_apply_entry`
+            # to the headers (done when auto-instrumenting Celery Beat tasks)
+            for key, value in updated_headers.items():
+                if key.startswith("sentry-"):
+                    updated_headers["headers"][key] = value
+
+    return updated_headers
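+# Example (illustrative): calling this with original_headers={} while a span is
+# active typically adds "sentry-trace", "baggage" and "sentry-task-enqueued-time"
+# both at the top level and inside the nested "headers" dict (see the
+# celery/celery#4875 workaround above).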
+
+
+class NoOpMgr:
+    def __enter__(self):
+        # type: () -> None
+        return None
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        # type: (Any, Any, Any) -> None
+        return None
+
+
+def _wrap_task_run(f):
+    # type: (F) -> F
+    @wraps(f)
+    def apply_async(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        # Note: kwargs can contain headers=None, so no setdefault!
+        # Unsure which backend though.
+        integration = sentry_sdk.get_client().get_integration(CeleryIntegration)
+        if integration is None:
+            return f(*args, **kwargs)
+
+        kwarg_headers = kwargs.get("headers") or {}
+        propagate_traces = kwarg_headers.pop(
+            "sentry-propagate-traces", integration.propagate_traces
+        )
+
+        if not propagate_traces:
+            return f(*args, **kwargs)
+
+        if isinstance(args[0], Task):
+            task_name = args[0].name  # type: str
+        elif len(args) > 1 and isinstance(args[1], str):
+            task_name = args[1]
+        else:
+            task_name = "<unknown Celery task>"
+
+        task_started_from_beat = sentry_sdk.get_isolation_scope()._name == "celery-beat"
+
+        span_mgr = (
+            sentry_sdk.start_span(
+                op=OP.QUEUE_SUBMIT_CELERY,
+                name=task_name,
+                origin=CeleryIntegration.origin,
+            )
+            if not task_started_from_beat
+            else NoOpMgr()
+        )  # type: Union[Span, NoOpMgr]
+
+        with span_mgr as span:
+            kwargs["headers"] = _update_celery_task_headers(
+                kwarg_headers, span, integration.monitor_beat_tasks
+            )
+            return f(*args, **kwargs)
+
+    return apply_async  # type: ignore
+
+
+def _wrap_tracer(task, f):
+    # type: (Any, F) -> F
+
+    # Need to wrap tracer for pushing the scope before prerun is sent, and
+    # popping it after postrun is sent.
+    #
+    # This is the reason we don't use signals for hooking in the first place.
+    # Also because in Celery 3, signal dispatch returns early if one handler
+    # crashes.
+    @wraps(f)
+    @ensure_integration_enabled(CeleryIntegration, f)
+    def _inner(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        with isolation_scope() as scope:
+            scope._name = "celery"
+            scope.clear_breadcrumbs()
+            scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
+
+            transaction = None
+
+            # Celery task objects are not a thing to be trusted. Even
+            # something such as attribute access can fail.
+            with capture_internal_exceptions():
+                headers = args[3].get("headers") or {}
+                transaction = continue_trace(
+                    headers,
+                    op=OP.QUEUE_TASK_CELERY,
+                    name="unknown celery task",
+                    source=TransactionSource.TASK,
+                    origin=CeleryIntegration.origin,
+                )
+                transaction.name = task.name
+                transaction.set_status(SPANSTATUS.OK)
+
+            if transaction is None:
+                return f(*args, **kwargs)
+
+            with sentry_sdk.start_transaction(
+                transaction,
+                custom_sampling_context={
+                    "celery_job": {
+                        "task": task.name,
+                        # for some reason, args[1] is a list if non-empty but a
+                        # tuple if empty
+                        "args": list(args[1]),
+                        "kwargs": args[2],
+                    }
+                },
+            ):
+                return f(*args, **kwargs)
+
+    return _inner  # type: ignore
+
+
+def _set_messaging_destination_name(task, span):
+    # type: (Any, Span) -> None
+    """Set "messaging.destination.name" tag for span"""
+    with capture_internal_exceptions():
+        delivery_info = task.request.delivery_info
+        if delivery_info:
+            routing_key = delivery_info.get("routing_key")
+            if delivery_info.get("exchange") == "" and routing_key is not None:
+                # Empty exchange indicates the default exchange, meaning the tasks
+                # are sent to the queue with the same name as the routing key.
+                span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key)
+
+
+def _wrap_task_call(task, f):
+    # type: (Any, F) -> F
+
+    # Need to wrap task call because the exception is caught before we get to
+    # see it. Also celery's reported stacktrace is untrustworthy.
+
+    # functools.wraps is important here because celery-once looks at this
+    # method's name. @ensure_integration_enabled internally calls functools.wraps,
+    # but if we ever remove the @ensure_integration_enabled decorator, we need
+    # to add @functools.wraps(f) here.
+    # https://github.com/getsentry/sentry-python/issues/421
+    @ensure_integration_enabled(CeleryIntegration, f)
+    def _inner(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        try:
+            with sentry_sdk.start_span(
+                op=OP.QUEUE_PROCESS,
+                name=task.name,
+                origin=CeleryIntegration.origin,
+            ) as span:
+                _set_messaging_destination_name(task, span)
+
+                latency = None
+                with capture_internal_exceptions():
+                    if (
+                        task.request.headers is not None
+                        and "sentry-task-enqueued-time" in task.request.headers
+                    ):
+                        latency = _now_seconds_since_epoch() - task.request.headers.pop(
+                            "sentry-task-enqueued-time"
+                        )
+
+                if latency is not None:
+                    span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency)
+
+                with capture_internal_exceptions():
+                    span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id)
+
+                with capture_internal_exceptions():
+                    span.set_data(
+                        SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries
+                    )
+
+                with capture_internal_exceptions():
+                    span.set_data(
+                        SPANDATA.MESSAGING_SYSTEM,
+                        task.app.connection().transport.driver_type,
+                    )
+
+                return f(*args, **kwargs)
+        except Exception:
+            exc_info = sys.exc_info()
+            with capture_internal_exceptions():
+                _capture_exception(task, exc_info)
+            reraise(*exc_info)
+
+    return _inner  # type: ignore
+
+
+def _patch_build_tracer():
+    # type: () -> None
+    import celery.app.trace as trace  # type: ignore
+
+    original_build_tracer = trace.build_tracer
+
+    def sentry_build_tracer(name, task, *args, **kwargs):
+        # type: (Any, Any, *Any, **Any) -> Any
+        if not getattr(task, "_sentry_is_patched", False):
+            # determine whether Celery will use __call__ or run and patch
+            # accordingly
+            if task_has_custom(task, "__call__"):
+                type(task).__call__ = _wrap_task_call(task, type(task).__call__)
+            else:
+                task.run = _wrap_task_call(task, task.run)
+
+            # `build_tracer` is apparently called for every task
+            # invocation. Can't wrap every celery task for every invocation
+            # or we will get infinitely nested wrapper functions.
+            task._sentry_is_patched = True
+
+        return _wrap_tracer(task, original_build_tracer(name, task, *args, **kwargs))
+
+    trace.build_tracer = sentry_build_tracer
+
+
+def _patch_task_apply_async():
+    # type: () -> None
+    Task.apply_async = _wrap_task_run(Task.apply_async)
+
+
+def _patch_celery_send_task():
+    # type: () -> None
+    from celery import Celery
+
+    Celery.send_task = _wrap_task_run(Celery.send_task)
+
+
+def _patch_worker_exit():
+    # type: () -> None
+
+    # Need to flush queue before worker shutdown because a crashing worker will
+    # call os._exit
+    from billiard.pool import Worker  # type: ignore
+
+    original_workloop = Worker.workloop
+
+    def sentry_workloop(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        try:
+            return original_workloop(*args, **kwargs)
+        finally:
+            with capture_internal_exceptions():
+                if (
+                    sentry_sdk.get_client().get_integration(CeleryIntegration)
+                    is not None
+                ):
+                    sentry_sdk.flush()
+
+    Worker.workloop = sentry_workloop
+
+
+def _patch_producer_publish():
+    # type: () -> None
+    original_publish = Producer.publish
+
+    @ensure_integration_enabled(CeleryIntegration, original_publish)
+    def sentry_publish(self, *args, **kwargs):
+        # type: (Producer, *Any, **Any) -> Any
+        kwargs_headers = kwargs.get("headers", {})
+        if not isinstance(kwargs_headers, Mapping):
+            # Ensure kwargs_headers is a Mapping, so we can safely call get().
+            # We don't expect this to happen, but it's better to be safe. Even
+            # if it does happen, only our instrumentation breaks. This line
+            # does not overwrite kwargs["headers"], so the original publish
+            # method will still work.
+            kwargs_headers = {}
+
+        task_name = kwargs_headers.get("task")
+        task_id = kwargs_headers.get("id")
+        retries = kwargs_headers.get("retries")
+
+        routing_key = kwargs.get("routing_key")
+        exchange = kwargs.get("exchange")
+
+        with sentry_sdk.start_span(
+            op=OP.QUEUE_PUBLISH,
+            name=task_name,
+            origin=CeleryIntegration.origin,
+        ) as span:
+            if task_id is not None:
+                span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id)
+
+            if exchange == "" and routing_key is not None:
+                # Empty exchange indicates the default exchange, meaning messages are
+                # routed to the queue with the same name as the routing key.
+                span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key)
+
+            if retries is not None:
+                span.set_data(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries)
+
+            with capture_internal_exceptions():
+                span.set_data(
+                    SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type
+                )
+
+            return original_publish(self, *args, **kwargs)
+
+    Producer.publish = sentry_publish
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/beat.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/beat.py
new file mode 100644
index 00000000..ddbc8561
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/beat.py
@@ -0,0 +1,293 @@
+import sentry_sdk
+from sentry_sdk.crons import capture_checkin, MonitorStatus
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.integrations.celery.utils import (
+    _get_humanized_interval,
+    _now_seconds_since_epoch,
+)
+from sentry_sdk.utils import (
+    logger,
+    match_regex_list,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Optional, TypeVar, Union
+    from sentry_sdk._types import (
+        MonitorConfig,
+        MonitorConfigScheduleType,
+        MonitorConfigScheduleUnit,
+    )
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+
+try:
+    from celery import Task, Celery  # type: ignore
+    from celery.beat import Scheduler  # type: ignore
+    from celery.schedules import crontab, schedule  # type: ignore
+    from celery.signals import (  # type: ignore
+        task_failure,
+        task_success,
+        task_retry,
+    )
+except ImportError:
+    raise DidNotEnable("Celery not installed")
+
+try:
+    from redbeat.schedulers import RedBeatScheduler  # type: ignore
+except ImportError:
+    RedBeatScheduler = None
+
+
+def _get_headers(task):
+    # type: (Task) -> dict[str, Any]
+    headers = task.request.get("headers") or {}
+
+    # flatten nested headers
+    if "headers" in headers:
+        headers.update(headers["headers"])
+        del headers["headers"]
+
+    headers.update(task.request.get("properties") or {})
+
+    return headers
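+# Example (illustrative): a task request whose headers look like
+#   {"sentry-trace": "...", "headers": {"sentry-monitor-slug": "my-task"}}
+# is flattened into
+#   {"sentry-trace": "...", "sentry-monitor-slug": "my-task", ...}
+# with any message properties merged in on top.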
+
+
+def _get_monitor_config(celery_schedule, app, monitor_name):
+    # type: (Any, Celery, str) -> MonitorConfig
+    monitor_config = {}  # type: MonitorConfig
+    schedule_type = None  # type: Optional[MonitorConfigScheduleType]
+    schedule_value = None  # type: Optional[Union[str, int]]
+    schedule_unit = None  # type: Optional[MonitorConfigScheduleUnit]
+
+    if isinstance(celery_schedule, crontab):
+        schedule_type = "crontab"
+        schedule_value = (
+            "{0._orig_minute} "
+            "{0._orig_hour} "
+            "{0._orig_day_of_month} "
+            "{0._orig_month_of_year} "
+            "{0._orig_day_of_week}".format(celery_schedule)
+        )
+    elif isinstance(celery_schedule, schedule):
+        schedule_type = "interval"
+        (schedule_value, schedule_unit) = _get_humanized_interval(
+            celery_schedule.seconds
+        )
+
+        if schedule_unit == "second":
+            logger.warning(
+                "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
+                monitor_name,
+                schedule_value,
+            )
+            return {}
+
+    else:
+        logger.warning(
+            "Celery schedule type '%s' not supported by Sentry Crons.",
+            type(celery_schedule),
+        )
+        return {}
+
+    monitor_config["schedule"] = {}
+    monitor_config["schedule"]["type"] = schedule_type
+    monitor_config["schedule"]["value"] = schedule_value
+
+    if schedule_unit is not None:
+        monitor_config["schedule"]["unit"] = schedule_unit
+
+    monitor_config["timezone"] = (
+        (
+            hasattr(celery_schedule, "tz")
+            and celery_schedule.tz is not None
+            and str(celery_schedule.tz)
+        )
+        or app.timezone
+        or "UTC"
+    )
+
+    return monitor_config
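+# Examples (illustrative): a crontab(minute="*/10") schedule yields a "crontab"
+# monitor schedule with value "*/10 * * * *"; a schedule(run_every=300) yields
+# an "interval" schedule with value 5 and unit "minute".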
+
+
+def _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration):
+    # type: (Any, Any, sentry_sdk.integrations.celery.CeleryIntegration) -> None
+    """
+    Add Sentry Crons information to the schedule_entry headers.
+    """
+    if not integration.monitor_beat_tasks:
+        return
+
+    monitor_name = schedule_entry.name
+
+    task_should_be_excluded = match_regex_list(
+        monitor_name, integration.exclude_beat_tasks
+    )
+    if task_should_be_excluded:
+        return
+
+    celery_schedule = schedule_entry.schedule
+    app = scheduler.app
+
+    monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
+
+    is_supported_schedule = bool(monitor_config)
+    if not is_supported_schedule:
+        return
+
+    headers = schedule_entry.options.pop("headers", {})
+    headers.update(
+        {
+            "sentry-monitor-slug": monitor_name,
+            "sentry-monitor-config": monitor_config,
+        }
+    )
+
+    check_in_id = capture_checkin(
+        monitor_slug=monitor_name,
+        monitor_config=monitor_config,
+        status=MonitorStatus.IN_PROGRESS,
+    )
+    headers.update({"sentry-monitor-check-in-id": check_in_id})
+
+    # Set the Sentry configuration in the options of the ScheduleEntry.
+    # Those will be picked up in `apply_async` and added to the headers.
+    schedule_entry.options["headers"] = headers
+
+
+def _wrap_beat_scheduler(original_function):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
+    """
+    Makes sure that:
+    - a new Sentry trace is started for each task started by Celery Beat and
+      it is propagated to the task.
+    - the Sentry Crons information is set in the Celery Beat task's
+      headers so that it is monitored with Sentry Crons.
+
+    After the patched function is called,
+    Celery Beat will call apply_async to put the task in the queue.
+    """
+    # Patch only once
+    # Can't use __name__ here, because some of our tests mock original_apply_entry
+    already_patched = "sentry_patched_scheduler" in str(original_function)
+    if already_patched:
+        return original_function
+
+    from sentry_sdk.integrations.celery import CeleryIntegration
+
+    def sentry_patched_scheduler(*args, **kwargs):
+        # type: (*Any, **Any) -> None
+        integration = sentry_sdk.get_client().get_integration(CeleryIntegration)
+        if integration is None:
+            return original_function(*args, **kwargs)
+
+        # Tasks started by Celery Beat start a new Trace
+        scope = sentry_sdk.get_isolation_scope()
+        scope.set_new_propagation_context()
+        scope._name = "celery-beat"
+
+        scheduler, schedule_entry = args
+        _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration)
+
+        return original_function(*args, **kwargs)
+
+    return sentry_patched_scheduler
+
+
+def _patch_beat_apply_entry():
+    # type: () -> None
+    Scheduler.apply_entry = _wrap_beat_scheduler(Scheduler.apply_entry)
+
+
+def _patch_redbeat_maybe_due():
+    # type: () -> None
+    if RedBeatScheduler is None:
+        return
+
+    RedBeatScheduler.maybe_due = _wrap_beat_scheduler(RedBeatScheduler.maybe_due)
+
+
+def _setup_celery_beat_signals(monitor_beat_tasks):
+    # type: (bool) -> None
+    if monitor_beat_tasks:
+        task_success.connect(crons_task_success)
+        task_failure.connect(crons_task_failure)
+        task_retry.connect(crons_task_retry)
+
+
+def crons_task_success(sender, **kwargs):
+    # type: (Task, dict[Any, Any]) -> None
+    logger.debug("celery_task_success %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=(
+            _now_seconds_since_epoch() - float(start_timestamp_s)
+            if start_timestamp_s
+            else None
+        ),
+        status=MonitorStatus.OK,
+    )
+
+
+def crons_task_failure(sender, **kwargs):
+    # type: (Task, dict[Any, Any]) -> None
+    logger.debug("celery_task_failure %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=(
+            _now_seconds_since_epoch() - float(start_timestamp_s)
+            if start_timestamp_s
+            else None
+        ),
+        status=MonitorStatus.ERROR,
+    )
+
+
+def crons_task_retry(sender, **kwargs):
+    # type: (Task, dict[Any, Any]) -> None
+    logger.debug("celery_task_retry %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=(
+            _now_seconds_since_epoch() - float(start_timestamp_s)
+            if start_timestamp_s
+            else None
+        ),
+        status=MonitorStatus.ERROR,
+    )
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/utils.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/utils.py
new file mode 100644
index 00000000..a1961b15
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/celery/utils.py
@@ -0,0 +1,43 @@
+import time
+from typing import TYPE_CHECKING, cast
+
+if TYPE_CHECKING:
+    from typing import Any, Tuple
+    from sentry_sdk._types import MonitorConfigScheduleUnit
+
+
+def _now_seconds_since_epoch():
+    # type: () -> float
+    # We cannot use `time.perf_counter()` when dealing with the duration
+    # of a Celery task, because the start of a Celery task and
+    # the end are recorded in different processes.
+    # Start happens in the Celery Beat process,
+    # the end in a Celery Worker process.
+    return time.time()
+
+
+def _get_humanized_interval(seconds):
+    # type: (float) -> Tuple[int, MonitorConfigScheduleUnit]
+    TIME_UNITS = (  # noqa: N806
+        ("day", 60 * 60 * 24.0),
+        ("hour", 60 * 60.0),
+        ("minute", 60.0),
+    )
+
+    seconds = float(seconds)
+    for unit, divider in TIME_UNITS:
+        if seconds >= divider:
+            interval = int(seconds / divider)
+            return (interval, cast("MonitorConfigScheduleUnit", unit))
+
+    return (int(seconds), "second")
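+# Examples (illustrative):
+#   _get_humanized_interval(86400) -> (1, "day")
+#   _get_humanized_interval(90)    -> (1, "minute")
+#   _get_humanized_interval(30)    -> (30, "second")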
+
+
+class NoOpMgr:
+    def __enter__(self):
+        # type: () -> None
+        return None
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        # type: (Any, Any, Any) -> None
+        return None
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/chalice.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/chalice.py
new file mode 100644
index 00000000..947e41eb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/chalice.py
@@ -0,0 +1,134 @@
+import sys
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.aws_lambda import _make_request_event_processor
+from sentry_sdk.tracing import TransactionSource
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    parse_version,
+    reraise,
+)
+
+try:
+    import chalice  # type: ignore
+    from chalice import __version__ as CHALICE_VERSION
+    from chalice import Chalice, ChaliceViewError
+    from chalice.app import EventSourceHandler as ChaliceEventSourceHandler  # type: ignore
+except ImportError:
+    raise DidNotEnable("Chalice is not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import TypeVar
+    from typing import Callable
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+
+class EventSourceHandler(ChaliceEventSourceHandler):  # type: ignore
+    def __call__(self, event, context):
+        # type: (Any, Any) -> Any
+        client = sentry_sdk.get_client()
+
+        with sentry_sdk.isolation_scope() as scope:
+            with capture_internal_exceptions():
+                configured_time = context.get_remaining_time_in_millis()
+                scope.add_event_processor(
+                    _make_request_event_processor(event, context, configured_time)
+                )
+            try:
+                return ChaliceEventSourceHandler.__call__(self, event, context)
+            except Exception:
+                exc_info = sys.exc_info()
+                event, hint = event_from_exception(
+                    exc_info,
+                    client_options=client.options,
+                    mechanism={"type": "chalice", "handled": False},
+                )
+                sentry_sdk.capture_event(event, hint=hint)
+                client.flush()
+                reraise(*exc_info)
+
+
+def _get_view_function_response(app, view_function, function_args):
+    # type: (Any, F, Any) -> F
+    @wraps(view_function)
+    def wrapped_view_function(**function_args):
+        # type: (**Any) -> Any
+        client = sentry_sdk.get_client()
+        with sentry_sdk.isolation_scope() as scope:
+            with capture_internal_exceptions():
+                configured_time = app.lambda_context.get_remaining_time_in_millis()
+                scope.set_transaction_name(
+                    app.lambda_context.function_name,
+                    source=TransactionSource.COMPONENT,
+                )
+
+                scope.add_event_processor(
+                    _make_request_event_processor(
+                        app.current_request.to_dict(),
+                        app.lambda_context,
+                        configured_time,
+                    )
+                )
+            try:
+                return view_function(**function_args)
+            except Exception as exc:
+                if isinstance(exc, ChaliceViewError):
+                    raise
+                exc_info = sys.exc_info()
+                event, hint = event_from_exception(
+                    exc_info,
+                    client_options=client.options,
+                    mechanism={"type": "chalice", "handled": False},
+                )
+                sentry_sdk.capture_event(event, hint=hint)
+                client.flush()
+                raise
+
+    return wrapped_view_function  # type: ignore
+
+
+class ChaliceIntegration(Integration):
+    identifier = "chalice"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        version = parse_version(CHALICE_VERSION)
+
+        if version is None:
+            raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION))
+
+        if version < (1, 20):
+            old_get_view_function_response = Chalice._get_view_function_response
+        else:
+            from chalice.app import RestAPIEventHandler
+
+            old_get_view_function_response = (
+                RestAPIEventHandler._get_view_function_response
+            )
+
+        def sentry_event_response(app, view_function, function_args):
+            # type: (Any, F, Dict[str, Any]) -> Any
+            wrapped_view_function = _get_view_function_response(
+                app, view_function, function_args
+            )
+
+            return old_get_view_function_response(
+                app, wrapped_view_function, function_args
+            )
+
+        if version < (1, 20):
+            Chalice._get_view_function_response = sentry_event_response
+        else:
+            RestAPIEventHandler._get_view_function_response = sentry_event_response
+        # for everything else (like events)
+        chalice.app.EventSourceHandler = EventSourceHandler
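For context, a minimal sketch of wiring this integration into a Chalice app. The DSN and app name below are placeholders (not values from this diff), and sentry_sdk.init() has to run before the app serves requests so that setup_once() can apply the patches above:

import sentry_sdk
from sentry_sdk.integrations.chalice import ChaliceIntegration
from chalice import Chalice

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[ChaliceIntegration()],
)

app = Chalice(app_name="example-app")  # hypothetical app name

@app.route("/")
def index():
    # Uncaught exceptions raised here are captured by the patched
    # _get_view_function_response wrapper and then re-raised.
    return {"hello": "world"}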
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/clickhouse_driver.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/clickhouse_driver.py
new file mode 100644
index 00000000..2561bfad
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/clickhouse_driver.py
@@ -0,0 +1,157 @@
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled
+
+from typing import TYPE_CHECKING, TypeVar
+
+# Hack to get new Python features working in older versions
+# without introducing a hard dependency on `typing_extensions`
+# from: https://stackoverflow.com/a/71944042/300572
+if TYPE_CHECKING:
+    from typing import ParamSpec, Callable
+else:
+    # Fake ParamSpec
+    class ParamSpec:
+        def __init__(self, _):
+            self.args = None
+            self.kwargs = None
+
+    # Callable[anything] will return None
+    class _Callable:
+        def __getitem__(self, _):
+            return None
+
+    # Make instances
+    Callable = _Callable()
+
+
+try:
+    import clickhouse_driver  # type: ignore[import-not-found]
+
+except ImportError:
+    raise DidNotEnable("clickhouse-driver not installed.")
+
+
+class ClickhouseDriverIntegration(Integration):
+    identifier = "clickhouse_driver"
+    origin = f"auto.db.{identifier}"
+
+    @staticmethod
+    def setup_once() -> None:
+        _check_minimum_version(ClickhouseDriverIntegration, clickhouse_driver.VERSION)
+
+        # Every query is done using the Connection's `send_query` function
+        clickhouse_driver.connection.Connection.send_query = _wrap_start(
+            clickhouse_driver.connection.Connection.send_query
+        )
+
+        # If the query contains parameters, then the send_data function is used to send those parameters to ClickHouse
+        clickhouse_driver.client.Client.send_data = _wrap_send_data(
+            clickhouse_driver.client.Client.send_data
+        )
+
+        # Every query ends either with the Client's `receive_end_of_query` (no result expected)
+        # or its `receive_result` (result expected)
+        clickhouse_driver.client.Client.receive_end_of_query = _wrap_end(
+            clickhouse_driver.client.Client.receive_end_of_query
+        )
+        if hasattr(clickhouse_driver.client.Client, "receive_end_of_insert_query"):
+            # In 0.2.7, insert queries are handled separately via `receive_end_of_insert_query`
+            clickhouse_driver.client.Client.receive_end_of_insert_query = _wrap_end(
+                clickhouse_driver.client.Client.receive_end_of_insert_query
+            )
+        clickhouse_driver.client.Client.receive_result = _wrap_end(
+            clickhouse_driver.client.Client.receive_result
+        )
+
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+
+def _wrap_start(f: Callable[P, T]) -> Callable[P, T]:
+    @ensure_integration_enabled(ClickhouseDriverIntegration, f)
+    def _inner(*args: P.args, **kwargs: P.kwargs) -> T:
+        connection = args[0]
+        query = args[1]
+        query_id = args[2] if len(args) > 2 else kwargs.get("query_id")
+        params = args[3] if len(args) > 3 else kwargs.get("params")
+
+        span = sentry_sdk.start_span(
+            op=OP.DB,
+            name=query,
+            origin=ClickhouseDriverIntegration.origin,
+        )
+
+        connection._sentry_span = span  # type: ignore[attr-defined]
+
+        _set_db_data(span, connection)
+
+        span.set_data("query", query)
+
+        if query_id:
+            span.set_data("db.query_id", query_id)
+
+        if params and should_send_default_pii():
+            span.set_data("db.params", params)
+
+        # run the original code
+        ret = f(*args, **kwargs)
+
+        return ret
+
+    return _inner
+
+
+def _wrap_end(f: Callable[P, T]) -> Callable[P, T]:
+    def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T:
+        res = f(*args, **kwargs)
+        instance = args[0]
+        span = getattr(instance.connection, "_sentry_span", None)  # type: ignore[attr-defined]
+
+        if span is not None:
+            if res is not None and should_send_default_pii():
+                span.set_data("db.result", res)
+
+            with capture_internal_exceptions():
+                span.scope.add_breadcrumb(
+                    message=span._data.pop("query"), category="query", data=span._data
+                )
+
+            span.finish()
+
+        return res
+
+    return _inner_end
+
+
+def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]:
+    def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T:
+        instance = args[0]  # type: clickhouse_driver.client.Client
+        data = args[2]
+        span = getattr(instance.connection, "_sentry_span", None)
+
+        if span is not None:
+            _set_db_data(span, instance.connection)
+
+            if should_send_default_pii():
+                db_params = span._data.get("db.params", [])
+                db_params.extend(data)
+                span.set_data("db.params", db_params)
+
+        return f(*args, **kwargs)
+
+    return _inner_send_data
+
+
+def _set_db_data(
+    span: Span, connection: clickhouse_driver.connection.Connection
+) -> None:
+    span.set_data(SPANDATA.DB_SYSTEM, "clickhouse")
+    span.set_data(SPANDATA.SERVER_ADDRESS, connection.host)
+    span.set_data(SPANDATA.SERVER_PORT, connection.port)
+    span.set_data(SPANDATA.DB_NAME, connection.database)
+    span.set_data(SPANDATA.DB_USER, connection.user)
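As a rough usage sketch (placeholder DSN; it assumes a ClickHouse server reachable on localhost): each query issued through clickhouse_driver is bracketed by the wrapped send_query/receive_* calls above and produces one db span, and send_default_pii=True additionally attaches db.params and db.result.

import sentry_sdk
from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration
from clickhouse_driver import Client

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[ClickhouseDriverIntegration()],
    traces_sample_rate=1.0,   # record transactions so the db spans have a parent
    send_default_pii=True,    # opt in to db.params / db.result on the spans
)

with sentry_sdk.start_transaction(name="clickhouse-demo"):
    client = Client(host="localhost")  # assumes a local ClickHouse server
    client.execute("SELECT 1")         # wrapped send_query/receive_result emit a db span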
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/cloud_resource_context.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/cloud_resource_context.py
new file mode 100644
index 00000000..ca5ae47e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/cloud_resource_context.py
@@ -0,0 +1,280 @@
+import json
+import urllib3
+
+from sentry_sdk.integrations import Integration
+from sentry_sdk.api import set_context
+from sentry_sdk.utils import logger
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Dict
+
+
+CONTEXT_TYPE = "cloud_resource"
+
+HTTP_TIMEOUT = 2.0
+
+AWS_METADATA_HOST = "169.254.169.254"
+AWS_TOKEN_URL = "http://{}/latest/api/token".format(AWS_METADATA_HOST)
+AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format(
+    AWS_METADATA_HOST
+)
+
+GCP_METADATA_HOST = "metadata.google.internal"
+GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format(
+    GCP_METADATA_HOST
+)
+
+
+class CLOUD_PROVIDER:  # noqa: N801
+    """
+    Name of the cloud provider.
+    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
+    """
+
+    ALIBABA = "alibaba_cloud"
+    AWS = "aws"
+    AZURE = "azure"
+    GCP = "gcp"
+    IBM = "ibm_cloud"
+    TENCENT = "tencent_cloud"
+
+
+class CLOUD_PLATFORM:  # noqa: N801
+    """
+    The cloud platform.
+    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
+    """
+
+    AWS_EC2 = "aws_ec2"
+    GCP_COMPUTE_ENGINE = "gcp_compute_engine"
+
+
+class CloudResourceContextIntegration(Integration):
+    """
+    Adds cloud resource context to the Sentry scope
+    """
+
+    identifier = "cloudresourcecontext"
+
+    cloud_provider = ""
+
+    aws_token = ""
+    http = urllib3.PoolManager(timeout=HTTP_TIMEOUT)
+
+    gcp_metadata = None
+
+    def __init__(self, cloud_provider=""):
+        # type: (str) -> None
+        CloudResourceContextIntegration.cloud_provider = cloud_provider
+
+    @classmethod
+    def _is_aws(cls):
+        # type: () -> bool
+        try:
+            r = cls.http.request(
+                "PUT",
+                AWS_TOKEN_URL,
+                headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
+            )
+
+            if r.status != 200:
+                return False
+
+            cls.aws_token = r.data.decode()
+            return True
+
+        except urllib3.exceptions.TimeoutError:
+            logger.debug(
+                "AWS metadata service timed out after %s seconds", HTTP_TIMEOUT
+            )
+            return False
+        except Exception as e:
+            logger.debug("Error checking AWS metadata service: %s", str(e))
+            return False
+
+    @classmethod
+    def _get_aws_context(cls):
+        # type: () -> Dict[str, str]
+        ctx = {
+            "cloud.provider": CLOUD_PROVIDER.AWS,
+            "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
+        }
+
+        try:
+            r = cls.http.request(
+                "GET",
+                AWS_METADATA_URL,
+                headers={"X-aws-ec2-metadata-token": cls.aws_token},
+            )
+
+            if r.status != 200:
+                return ctx
+
+            data = json.loads(r.data.decode("utf-8"))
+
+            try:
+                ctx["cloud.account.id"] = data["accountId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.availability_zone"] = data["availabilityZone"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.region"] = data["region"]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.id"] = data["instanceId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.type"] = data["instanceType"]
+            except Exception:
+                pass
+
+        except urllib3.exceptions.TimeoutError:
+            logger.debug(
+                "AWS metadata service timed out after %s seconds", HTTP_TIMEOUT
+            )
+        except Exception as e:
+            logger.debug("Error fetching AWS metadata: %s", str(e))
+
+        return ctx
+
+    @classmethod
+    def _is_gcp(cls):
+        # type: () -> bool
+        try:
+            r = cls.http.request(
+                "GET",
+                GCP_METADATA_URL,
+                headers={"Metadata-Flavor": "Google"},
+            )
+
+            if r.status != 200:
+                return False
+
+            cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
+            return True
+
+        except urllib3.exceptions.TimeoutError:
+            logger.debug(
+                "GCP metadata service timed out after %s seconds", HTTP_TIMEOUT
+            )
+            return False
+        except Exception as e:
+            logger.debug("Error checking GCP metadata service: %s", str(e))
+            return False
+
+    @classmethod
+    def _get_gcp_context(cls):
+        # type: () -> Dict[str, str]
+        ctx = {
+            "cloud.provider": CLOUD_PROVIDER.GCP,
+            "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
+        }
+
+        try:
+            if cls.gcp_metadata is None:
+                r = cls.http.request(
+                    "GET",
+                    GCP_METADATA_URL,
+                    headers={"Metadata-Flavor": "Google"},
+                )
+
+                if r.status != 200:
+                    return ctx
+
+                cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
+
+            try:
+                ctx["cloud.account.id"] = cls.gcp_metadata["project"]["projectId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"][
+                    "zone"
+                ].split("/")[-1]
+            except Exception:
+                pass
+
+            try:
+                # only populated in Google Cloud Run
+                ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[
+                    -1
+                ]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.id"] = cls.gcp_metadata["instance"]["id"]
+            except Exception:
+                pass
+
+        except urllib3.exceptions.TimeoutError:
+            logger.debug(
+                "GCP metadata service timed out after %s seconds", HTTP_TIMEOUT
+            )
+        except Exception as e:
+            logger.debug("Error fetching GCP metadata: %s", str(e))
+
+        return ctx
+
+    @classmethod
+    def _get_cloud_provider(cls):
+        # type: () -> str
+        if cls._is_aws():
+            return CLOUD_PROVIDER.AWS
+
+        if cls._is_gcp():
+            return CLOUD_PROVIDER.GCP
+
+        return ""
+
+    @classmethod
+    def _get_cloud_resource_context(cls):
+        # type: () -> Dict[str, str]
+        cloud_provider = (
+            cls.cloud_provider
+            if cls.cloud_provider != ""
+            else CloudResourceContextIntegration._get_cloud_provider()
+        )
+        if cloud_provider in context_getters.keys():
+            return context_getters[cloud_provider]()
+
+        return {}
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        cloud_provider = CloudResourceContextIntegration.cloud_provider
+        unsupported_cloud_provider = (
+            cloud_provider != "" and cloud_provider not in context_getters.keys()
+        )
+
+        if unsupported_cloud_provider:
+            logger.warning(
+                "Invalid value for cloud_provider: %s (must be in %s). Falling back to autodetection...",
+                CloudResourceContextIntegration.cloud_provider,
+                list(context_getters.keys()),
+            )
+
+        context = CloudResourceContextIntegration._get_cloud_resource_context()
+        if context != {}:
+            set_context(CONTEXT_TYPE, context)
+
+
+# Maps the currently supported cloud providers
+# to the functions that extract their context
+context_getters = {
+    CLOUD_PROVIDER.AWS: CloudResourceContextIntegration._get_aws_context,
+    CLOUD_PROVIDER.GCP: CloudResourceContextIntegration._get_gcp_context,
+}
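A small sketch of enabling the integration (placeholder DSN). Passing cloud_provider skips the metadata-endpoint autodetection shown above; omitting the argument falls back to _get_cloud_provider():

import sentry_sdk
from sentry_sdk.integrations.cloud_resource_context import (
    CloudResourceContextIntegration,
)

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    # "aws" or "gcp"; leave the argument out to autodetect via the metadata services
    integrations=[CloudResourceContextIntegration(cloud_provider="aws")],
)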
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/cohere.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/cohere.py
new file mode 100644
index 00000000..b4c2af91
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/cohere.py
@@ -0,0 +1,270 @@
+from functools import wraps
+
+from sentry_sdk import consts
+from sentry_sdk.ai.monitoring import record_token_usage
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.ai.utils import set_data_normalized
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Iterator
+    from sentry_sdk.tracing import Span
+
+import sentry_sdk
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+
+try:
+    from cohere.client import Client
+    from cohere.base_client import BaseCohere
+    from cohere import (
+        ChatStreamEndEvent,
+        NonStreamedChatResponse,
+    )
+
+    if TYPE_CHECKING:
+        from cohere import StreamedChatResponse
+except ImportError:
+    raise DidNotEnable("Cohere not installed")
+
+try:
+    # cohere 5.9.3+
+    from cohere import StreamEndStreamedChatResponse
+except ImportError:
+    from cohere import StreamedChatResponse_StreamEnd as StreamEndStreamedChatResponse
+
+
+COLLECTED_CHAT_PARAMS = {
+    "model": SPANDATA.AI_MODEL_ID,
+    "k": SPANDATA.AI_TOP_K,
+    "p": SPANDATA.AI_TOP_P,
+    "seed": SPANDATA.AI_SEED,
+    "frequency_penalty": SPANDATA.AI_FREQUENCY_PENALTY,
+    "presence_penalty": SPANDATA.AI_PRESENCE_PENALTY,
+    "raw_prompting": SPANDATA.AI_RAW_PROMPTING,
+}
+
+COLLECTED_PII_CHAT_PARAMS = {
+    "tools": SPANDATA.AI_TOOLS,
+    "preamble": SPANDATA.AI_PREAMBLE,
+}
+
+COLLECTED_CHAT_RESP_ATTRS = {
+    "generation_id": "ai.generation_id",
+    "is_search_required": "ai.is_search_required",
+    "finish_reason": "ai.finish_reason",
+}
+
+COLLECTED_PII_CHAT_RESP_ATTRS = {
+    "citations": "ai.citations",
+    "documents": "ai.documents",
+    "search_queries": "ai.search_queries",
+    "search_results": "ai.search_results",
+    "tool_calls": "ai.tool_calls",
+}
+
+
+class CohereIntegration(Integration):
+    identifier = "cohere"
+    origin = f"auto.ai.{identifier}"
+
+    def __init__(self, include_prompts=True):
+        # type: (CohereIntegration, bool) -> None
+        self.include_prompts = include_prompts
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        BaseCohere.chat = _wrap_chat(BaseCohere.chat, streaming=False)
+        Client.embed = _wrap_embed(Client.embed)
+        BaseCohere.chat_stream = _wrap_chat(BaseCohere.chat_stream, streaming=True)
+
+
+def _capture_exception(exc):
+    # type: (Any) -> None
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "cohere", "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _wrap_chat(f, streaming):
+    # type: (Callable[..., Any], bool) -> Callable[..., Any]
+
+    def collect_chat_response_fields(span, res, include_pii):
+        # type: (Span, NonStreamedChatResponse, bool) -> None
+        if include_pii:
+            if hasattr(res, "text"):
+                set_data_normalized(
+                    span,
+                    SPANDATA.AI_RESPONSES,
+                    [res.text],
+                )
+            for pii_attr in COLLECTED_PII_CHAT_RESP_ATTRS:
+                if hasattr(res, pii_attr):
+                    set_data_normalized(span, "ai." + pii_attr, getattr(res, pii_attr))
+
+        for attr in COLLECTED_CHAT_RESP_ATTRS:
+            if hasattr(res, attr):
+                set_data_normalized(span, "ai." + attr, getattr(res, attr))
+
+        if hasattr(res, "meta"):
+            if hasattr(res.meta, "billed_units"):
+                record_token_usage(
+                    span,
+                    prompt_tokens=res.meta.billed_units.input_tokens,
+                    completion_tokens=res.meta.billed_units.output_tokens,
+                )
+            elif hasattr(res.meta, "tokens"):
+                record_token_usage(
+                    span,
+                    prompt_tokens=res.meta.tokens.input_tokens,
+                    completion_tokens=res.meta.tokens.output_tokens,
+                )
+
+            if hasattr(res.meta, "warnings"):
+                set_data_normalized(span, "ai.warnings", res.meta.warnings)
+
+    @wraps(f)
+    def new_chat(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(CohereIntegration)
+
+        if (
+            integration is None
+            or "message" not in kwargs
+            or not isinstance(kwargs.get("message"), str)
+        ):
+            return f(*args, **kwargs)
+
+        message = kwargs.get("message")
+
+        span = sentry_sdk.start_span(
+            op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE,
+            name="cohere.client.Chat",
+            origin=CohereIntegration.origin,
+        )
+        span.__enter__()
+        try:
+            res = f(*args, **kwargs)
+        except Exception as e:
+            _capture_exception(e)
+            span.__exit__(None, None, None)
+            raise e from None
+
+        with capture_internal_exceptions():
+            if should_send_default_pii() and integration.include_prompts:
+                set_data_normalized(
+                    span,
+                    SPANDATA.AI_INPUT_MESSAGES,
+                    list(
+                        map(
+                            lambda x: {
+                                "role": getattr(x, "role", "").lower(),
+                                "content": getattr(x, "message", ""),
+                            },
+                            kwargs.get("chat_history", []),
+                        )
+                    )
+                    + [{"role": "user", "content": message}],
+                )
+                for k, v in COLLECTED_PII_CHAT_PARAMS.items():
+                    if k in kwargs:
+                        set_data_normalized(span, v, kwargs[k])
+
+            for k, v in COLLECTED_CHAT_PARAMS.items():
+                if k in kwargs:
+                    set_data_normalized(span, v, kwargs[k])
+            set_data_normalized(span, SPANDATA.AI_STREAMING, False)
+
+            if streaming:
+                old_iterator = res
+
+                def new_iterator():
+                    # type: () -> Iterator[StreamedChatResponse]
+
+                    with capture_internal_exceptions():
+                        for x in old_iterator:
+                            if isinstance(x, ChatStreamEndEvent) or isinstance(
+                                x, StreamEndStreamedChatResponse
+                            ):
+                                collect_chat_response_fields(
+                                    span,
+                                    x.response,
+                                    include_pii=should_send_default_pii()
+                                    and integration.include_prompts,
+                                )
+                            yield x
+
+                    span.__exit__(None, None, None)
+
+                return new_iterator()
+            elif isinstance(res, NonStreamedChatResponse):
+                collect_chat_response_fields(
+                    span,
+                    res,
+                    include_pii=should_send_default_pii()
+                    and integration.include_prompts,
+                )
+                span.__exit__(None, None, None)
+            else:
+                set_data_normalized(span, "unknown_response", True)
+                span.__exit__(None, None, None)
+            return res
+
+    return new_chat
+
+
+def _wrap_embed(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
+
+    @wraps(f)
+    def new_embed(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(CohereIntegration)
+        if integration is None:
+            return f(*args, **kwargs)
+
+        with sentry_sdk.start_span(
+            op=consts.OP.COHERE_EMBEDDINGS_CREATE,
+            name="Cohere Embedding Creation",
+            origin=CohereIntegration.origin,
+        ) as span:
+            if "texts" in kwargs and (
+                should_send_default_pii() and integration.include_prompts
+            ):
+                if isinstance(kwargs["texts"], str):
+                    set_data_normalized(span, "ai.texts", [kwargs["texts"]])
+                elif (
+                    isinstance(kwargs["texts"], list)
+                    and len(kwargs["texts"]) > 0
+                    and isinstance(kwargs["texts"][0], str)
+                ):
+                    set_data_normalized(
+                        span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"]
+                    )
+
+            if "model" in kwargs:
+                set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])
+            try:
+                res = f(*args, **kwargs)
+            except Exception as e:
+                _capture_exception(e)
+                raise e from None
+            if (
+                hasattr(res, "meta")
+                and hasattr(res.meta, "billed_units")
+                and hasattr(res.meta.billed_units, "input_tokens")
+            ):
+                record_token_usage(
+                    span,
+                    prompt_tokens=res.meta.billed_units.input_tokens,
+                    total_tokens=res.meta.billed_units.input_tokens,
+                )
+            return res
+
+    return new_embed
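A hedged sketch of how the wrappers above get exercised (the DSN and API key are placeholders). Prompts and responses are only attached when both send_default_pii=True and include_prompts=True:

import sentry_sdk
from sentry_sdk.integrations.cohere import CohereIntegration
import cohere

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[CohereIntegration(include_prompts=True)],
    traces_sample_rate=1.0,
    send_default_pii=True,  # required for ai.input_messages / ai.responses
)

client = cohere.Client(api_key="<cohere-api-key>")  # placeholder key
with sentry_sdk.start_transaction(name="cohere-demo"):
    client.chat(message="Say hello")  # wrapped by new_chat(); creates a cohere span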
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/dedupe.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/dedupe.py
new file mode 100644
index 00000000..a115e352
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/dedupe.py
@@ -0,0 +1,51 @@
+import sentry_sdk
+from sentry_sdk.utils import ContextVar
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+
+    from sentry_sdk._types import Event, Hint
+
+
+class DedupeIntegration(Integration):
+    identifier = "dedupe"
+
+    def __init__(self):
+        # type: () -> None
+        self._last_seen = ContextVar("last-seen")
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @add_global_event_processor
+        def processor(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if hint is None:
+                return event
+
+            integration = sentry_sdk.get_client().get_integration(DedupeIntegration)
+            if integration is None:
+                return event
+
+            exc_info = hint.get("exc_info", None)
+            if exc_info is None:
+                return event
+
+            exc = exc_info[1]
+            if integration._last_seen.get(None) is exc:
+                return None
+            integration._last_seen.set(exc)
+            return event
+
+    @staticmethod
+    def reset_last_seen():
+        # type: () -> None
+        integration = sentry_sdk.get_client().get_integration(DedupeIntegration)
+        if integration is None:
+            return
+
+        integration._last_seen.set(None)
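For illustration, a sketch of the intended behaviour (placeholder DSN; DedupeIntegration is enabled by default, so listing it explicitly is not required): capturing the same exception object twice only produces one event, and reset_last_seen() clears the marker.

import sentry_sdk
from sentry_sdk.integrations.dedupe import DedupeIntegration

sentry_sdk.init(dsn="https://<key>@<org>.ingest.sentry.io/<project>")  # placeholder DSN

try:
    1 / 0
except ZeroDivisionError:
    sentry_sdk.capture_exception()   # sent
    sentry_sdk.capture_exception()   # dropped: _last_seen still points at this exception
    DedupeIntegration.reset_last_seen()
    sentry_sdk.capture_exception()   # sent again after the reset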
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/__init__.py
new file mode 100644
index 00000000..ff67b3e3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/__init__.py
@@ -0,0 +1,747 @@
+import inspect
+import sys
+import threading
+import weakref
+from importlib import import_module
+
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.scope import add_global_event_processor, should_send_default_pii
+from sentry_sdk.serializer import add_global_repr_processor
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource
+from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
+    SENSITIVE_DATA_SUBSTITUTE,
+    logger,
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    transaction_from_function,
+    walk_exception_chain,
+)
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import (
+    DEFAULT_HTTP_METHODS_TO_CAPTURE,
+    RequestExtractor,
+)
+
+try:
+    from django import VERSION as DJANGO_VERSION
+    from django.conf import settings as django_settings
+    from django.core import signals
+    from django.conf import settings
+
+    try:
+        from django.urls import resolve
+    except ImportError:
+        from django.core.urlresolvers import resolve
+
+    try:
+        from django.urls import Resolver404
+    except ImportError:
+        from django.core.urlresolvers import Resolver404
+
+    # Only available in Django 3.0+
+    try:
+        from django.core.handlers.asgi import ASGIRequest
+    except Exception:
+        ASGIRequest = None
+
+except ImportError:
+    raise DidNotEnable("Django not installed")
+
+from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
+from sentry_sdk.integrations.django.templates import (
+    get_template_frame_from_exception,
+    patch_templates,
+)
+from sentry_sdk.integrations.django.middleware import patch_django_middlewares
+from sentry_sdk.integrations.django.signals_handlers import patch_signals
+from sentry_sdk.integrations.django.views import patch_views
+
+if DJANGO_VERSION[:2] > (1, 8):
+    from sentry_sdk.integrations.django.caching import patch_caching
+else:
+    patch_caching = None  # type: ignore
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+    from typing import Optional
+    from typing import Union
+    from typing import List
+
+    from django.core.handlers.wsgi import WSGIRequest
+    from django.http.response import HttpResponse
+    from django.http.request import QueryDict
+    from django.utils.datastructures import MultiValueDict
+
+    from sentry_sdk.tracing import Span
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
+
+
+if DJANGO_VERSION < (1, 10):
+
+    def is_authenticated(request_user):
+        # type: (Any) -> bool
+        return request_user.is_authenticated()
+
+else:
+
+    def is_authenticated(request_user):
+        # type: (Any) -> bool
+        return request_user.is_authenticated
+
+
+TRANSACTION_STYLE_VALUES = ("function_name", "url")
+
+
+class DjangoIntegration(Integration):
+    """
+    Auto instrument a Django application.
+
+    :param transaction_style: How to derive transaction names. Either `"function_name"` or `"url"`. Defaults to `"url"`.
+    :param middleware_spans: Whether to create spans for middleware. Defaults to `True`.
+    :param signals_spans: Whether to create spans for signals. Defaults to `True`.
+    :param signals_denylist: A list of signals to ignore when creating spans.
+    :param cache_spans: Whether to create spans for cache operations. Defaults to `False`.
+    :param http_methods_to_capture: The HTTP methods for which transactions should be captured. Defaults to `DEFAULT_HTTP_METHODS_TO_CAPTURE`.
+    """
+
+    identifier = "django"
+    origin = f"auto.http.{identifier}"
+    origin_db = f"auto.db.{identifier}"
+
+    transaction_style = ""
+    middleware_spans = None
+    signals_spans = None
+    cache_spans = None
+    signals_denylist = []  # type: list[signals.Signal]
+
+    def __init__(
+        self,
+        transaction_style="url",  # type: str
+        middleware_spans=True,  # type: bool
+        signals_spans=True,  # type: bool
+        cache_spans=False,  # type: bool
+        signals_denylist=None,  # type: Optional[list[signals.Signal]]
+        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: tuple[str, ...]
+    ):
+        # type: (...) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+        self.middleware_spans = middleware_spans
+
+        self.signals_spans = signals_spans
+        self.signals_denylist = signals_denylist or []
+
+        self.cache_spans = cache_spans
+
+        self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _check_minimum_version(DjangoIntegration, DJANGO_VERSION)
+
+        install_sql_hook()
+        # Patch in our custom middleware.
+
+        # logs an error for every 500
+        ignore_logger("django.server")
+        ignore_logger("django.request")
+
+        from django.core.handlers.wsgi import WSGIHandler
+
+        old_app = WSGIHandler.__call__
+
+        @ensure_integration_enabled(DjangoIntegration, old_app)
+        def sentry_patched_wsgi_handler(self, environ, start_response):
+            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+            bound_old_app = old_app.__get__(self, WSGIHandler)
+
+            from django.conf import settings
+
+            use_x_forwarded_for = settings.USE_X_FORWARDED_HOST
+
+            integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+
+            middleware = SentryWsgiMiddleware(
+                bound_old_app,
+                use_x_forwarded_for,
+                span_origin=DjangoIntegration.origin,
+                http_methods_to_capture=(
+                    integration.http_methods_to_capture
+                    if integration
+                    else DEFAULT_HTTP_METHODS_TO_CAPTURE
+                ),
+            )
+            return middleware(environ, start_response)
+
+        WSGIHandler.__call__ = sentry_patched_wsgi_handler
+
+        _patch_get_response()
+
+        _patch_django_asgi_handler()
+
+        signals.got_request_exception.connect(_got_request_exception)
+
+        @add_global_event_processor
+        def process_django_templates(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if hint is None:
+                return event
+
+            exc_info = hint.get("exc_info", None)
+
+            if exc_info is None:
+                return event
+
+            exception = event.get("exception", None)
+
+            if exception is None:
+                return event
+
+            values = exception.get("values", None)
+
+            if values is None:
+                return event
+
+            for exception, (_, exc_value, _) in zip(
+                reversed(values), walk_exception_chain(exc_info)
+            ):
+                frame = get_template_frame_from_exception(exc_value)
+                if frame is not None:
+                    frames = exception.get("stacktrace", {}).get("frames", [])
+
+                    for i in reversed(range(len(frames))):
+                        f = frames[i]
+                        if (
+                            f.get("function") in ("Parser.parse", "parse", "render")
+                            and f.get("module") == "django.template.base"
+                        ):
+                            i += 1
+                            break
+                    else:
+                        i = len(frames)
+
+                    frames.insert(i, frame)
+
+            return event
+
+        @add_global_repr_processor
+        def _django_queryset_repr(value, hint):
+            # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str]
+            try:
+                # Django 1.6 can fail to import `QuerySet` when Django settings
+                # have not yet been initialized.
+                #
+                # If we fail to import, return `NotImplemented`. It's at least
+                # unlikely that we have a query set in `value` when importing
+                # `QuerySet` fails.
+                from django.db.models.query import QuerySet
+            except Exception:
+                return NotImplemented
+
+            if not isinstance(value, QuerySet) or value._result_cache:
+                return NotImplemented
+
+            return "<%s from %s at 0x%x>" % (
+                value.__class__.__name__,
+                value.__module__,
+                id(value),
+            )
+
+        _patch_channels()
+        patch_django_middlewares()
+        patch_views()
+        patch_templates()
+        patch_signals()
+
+        if patch_caching is not None:
+            patch_caching()
+
+
+_DRF_PATCHED = False
+_DRF_PATCH_LOCK = threading.Lock()
+
+
+def _patch_drf():
+    # type: () -> None
+    """
+    Patch Django Rest Framework for more/better request data. DRF's request
+    type is a wrapper around Django's request type. The attribute we're
+    interested in is `request.data`, which is a cached property containing a
+    parsed request body. Reading a request body from that property is more
+    reliable than reading from any of Django's own properties, as those don't
+    hold payloads in memory and therefore can only be accessed once.
+
+    We patch the Django request object to include a weak backreference to the
+    DRF request object, such that we can later use either in
+    `DjangoRequestExtractor`.
+
+    This function is not called directly on SDK setup, because importing almost
+    any part of Django Rest Framework will try to access Django settings (where
+    `sentry_sdk.init()` might be called from in the first place). Instead we
+    run this function on every request and do the patching on the first
+    request.
+    """
+
+    global _DRF_PATCHED
+
+    if _DRF_PATCHED:
+        # Double-checked locking
+        return
+
+    with _DRF_PATCH_LOCK:
+        if _DRF_PATCHED:
+            return
+
+        # We set this regardless of whether the code below succeeds or fails.
+        # There is no point in trying to patch again on the next request.
+        _DRF_PATCHED = True
+
+        with capture_internal_exceptions():
+            try:
+                from rest_framework.views import APIView  # type: ignore
+            except ImportError:
+                pass
+            else:
+                old_drf_initial = APIView.initial
+
+                def sentry_patched_drf_initial(self, request, *args, **kwargs):
+                    # type: (APIView, Any, *Any, **Any) -> Any
+                    with capture_internal_exceptions():
+                        request._request._sentry_drf_request_backref = weakref.ref(
+                            request
+                        )
+                    return old_drf_initial(self, request, *args, **kwargs)
+
+                APIView.initial = sentry_patched_drf_initial
+
+
+def _patch_channels():
+    # type: () -> None
+    try:
+        from channels.http import AsgiHandler  # type: ignore
+    except ImportError:
+        return
+
+    if not HAS_REAL_CONTEXTVARS:
+        # We better have contextvars or we're going to leak state between
+        # requests.
+        #
+        # We cannot hard-raise here because channels may not be used at all in
+        # the current process. That is the case when running traditional WSGI
+        # workers in gunicorn+gevent and the websocket stuff in a separate
+        # process.
+        logger.warning(
+            "We detected that you are using Django channels 2.0."
+            + CONTEXTVARS_ERROR_MESSAGE
+        )
+
+    from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl
+
+    patch_channels_asgi_handler_impl(AsgiHandler)
+
+
+def _patch_django_asgi_handler():
+    # type: () -> None
+    try:
+        from django.core.handlers.asgi import ASGIHandler
+    except ImportError:
+        return
+
+    if not HAS_REAL_CONTEXTVARS:
+        # We better have contextvars or we're going to leak state between
+        # requests.
+        #
+        # We cannot hard-raise here because Django's ASGI stuff may not be used
+        # at all.
+        logger.warning(
+            "We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE
+        )
+
+    from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl
+
+    patch_django_asgi_handler_impl(ASGIHandler)
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (sentry_sdk.Scope, str, WSGIRequest) -> None
+    try:
+        transaction_name = None
+        if transaction_style == "function_name":
+            fn = resolve(request.path).func
+            transaction_name = transaction_from_function(getattr(fn, "view_class", fn))
+
+        elif transaction_style == "url":
+            if hasattr(request, "urlconf"):
+                transaction_name = LEGACY_RESOLVER.resolve(
+                    request.path_info, urlconf=request.urlconf
+                )
+            else:
+                transaction_name = LEGACY_RESOLVER.resolve(request.path_info)
+
+        if transaction_name is None:
+            transaction_name = request.path_info
+            source = TransactionSource.URL
+        else:
+            source = SOURCE_FOR_STYLE[transaction_style]
+
+        scope.set_transaction_name(
+            transaction_name,
+            source=source,
+        )
+    except Resolver404:
+        urlconf = import_module(settings.ROOT_URLCONF)
+        # This exception only gets thrown when transaction_style is `function_name`
+        # So we don't check here what style is configured
+        if hasattr(urlconf, "handler404"):
+            handler = urlconf.handler404
+            if isinstance(handler, str):
+                scope.transaction = handler
+            else:
+                scope.transaction = transaction_from_function(
+                    getattr(handler, "view_class", handler)
+                )
+    except Exception:
+        pass
+
+
+def _before_get_response(request):
+    # type: (WSGIRequest) -> None
+    integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+    if integration is None:
+        return
+
+    _patch_drf()
+
+    scope = sentry_sdk.get_current_scope()
+    # Rely on WSGI middleware to start a trace
+    _set_transaction_name_and_source(scope, integration.transaction_style, request)
+
+    scope.add_event_processor(
+        _make_wsgi_request_event_processor(weakref.ref(request), integration)
+    )
+
+
+def _attempt_resolve_again(request, scope, transaction_style):
+    # type: (WSGIRequest, sentry_sdk.Scope, str) -> None
+    """
+    Some Django middlewares overwrite request.urlconf, so we need to respect
+    that contract and try to resolve the URL again.
+    """
+    if not hasattr(request, "urlconf"):
+        return
+
+    _set_transaction_name_and_source(scope, transaction_style, request)
+
+
+def _after_get_response(request):
+    # type: (WSGIRequest) -> None
+    integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+    if integration is None or integration.transaction_style != "url":
+        return
+
+    scope = sentry_sdk.get_current_scope()
+    _attempt_resolve_again(request, scope, integration.transaction_style)
+
+
+def _patch_get_response():
+    # type: () -> None
+    """
+    Patch get_response, because at that point we have the Django request object.
+    """
+    from django.core.handlers.base import BaseHandler
+
+    old_get_response = BaseHandler.get_response
+
+    def sentry_patched_get_response(self, request):
+        # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
+        _before_get_response(request)
+        rv = old_get_response(self, request)
+        _after_get_response(request)
+        return rv
+
+    BaseHandler.get_response = sentry_patched_get_response
+
+    if hasattr(BaseHandler, "get_response_async"):
+        from sentry_sdk.integrations.django.asgi import patch_get_response_async
+
+        patch_get_response_async(BaseHandler, _before_get_response)
+
+
+def _make_wsgi_request_event_processor(weak_request, integration):
+    # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
+    def wsgi_request_event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        # if the request is gone we are fine not logging the data from
+        # it.  This might happen if the processor is pushed away to
+        # another thread.
+        request = weak_request()
+        if request is None:
+            return event
+
+        django_3 = ASGIRequest is not None
+        if django_3 and type(request) == ASGIRequest:
+            # We have an `asgi_request_event_processor` for this.
+            return event
+
+        with capture_internal_exceptions():
+            DjangoRequestExtractor(request).extract_into_event(event)
+
+        if should_send_default_pii():
+            with capture_internal_exceptions():
+                _set_user_info(request, event)
+
+        return event
+
+    return wsgi_request_event_processor
+
+
+def _got_request_exception(request=None, **kwargs):
+    # type: (WSGIRequest, **Any) -> None
+    client = sentry_sdk.get_client()
+    integration = client.get_integration(DjangoIntegration)
+    if integration is None:
+        return
+
+    if request is not None and integration.transaction_style == "url":
+        scope = sentry_sdk.get_current_scope()
+        _attempt_resolve_again(request, scope, integration.transaction_style)
+
+    event, hint = event_from_exception(
+        sys.exc_info(),
+        client_options=client.options,
+        mechanism={"type": "django", "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+class DjangoRequestExtractor(RequestExtractor):
+    def __init__(self, request):
+        # type: (Union[WSGIRequest, ASGIRequest]) -> None
+        try:
+            drf_request = request._sentry_drf_request_backref()
+            if drf_request is not None:
+                request = drf_request
+        except AttributeError:
+            pass
+        self.request = request
+
+    def env(self):
+        # type: () -> Dict[str, str]
+        return self.request.META
+
+    def cookies(self):
+        # type: () -> Dict[str, Union[str, AnnotatedValue]]
+        privacy_cookies = [
+            django_settings.CSRF_COOKIE_NAME,
+            django_settings.SESSION_COOKIE_NAME,
+        ]
+
+        clean_cookies = {}  # type: Dict[str, Union[str, AnnotatedValue]]
+        for key, val in self.request.COOKIES.items():
+            if key in privacy_cookies:
+                clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE
+            else:
+                clean_cookies[key] = val
+
+        return clean_cookies
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body
+
+    def form(self):
+        # type: () -> QueryDict
+        return self.request.POST
+
+    def files(self):
+        # type: () -> MultiValueDict
+        return self.request.FILES
+
+    def size_of_file(self, file):
+        # type: (Any) -> int
+        return file.size
+
+    def parsed_body(self):
+        # type: () -> Optional[Dict[str, Any]]
+        try:
+            return self.request.data
+        except Exception:
+            return RequestExtractor.parsed_body(self)
+
+
+def _set_user_info(request, event):
+    # type: (WSGIRequest, Event) -> None
+    user_info = event.setdefault("user", {})
+
+    user = getattr(request, "user", None)
+
+    if user is None or not is_authenticated(user):
+        return
+
+    try:
+        user_info.setdefault("id", str(user.pk))
+    except Exception:
+        pass
+
+    try:
+        user_info.setdefault("email", user.email)
+    except Exception:
+        pass
+
+    try:
+        user_info.setdefault("username", user.get_username())
+    except Exception:
+        pass
+
+
+def install_sql_hook():
+    # type: () -> None
+    """If installed this causes Django's queries to be captured."""
+    try:
+        from django.db.backends.utils import CursorWrapper
+    except ImportError:
+        from django.db.backends.util import CursorWrapper
+
+    try:
+        # Django 1.6 and 1.7 compatibility
+        from django.db.backends import BaseDatabaseWrapper
+    except ImportError:
+        # django 1.8 or later
+        from django.db.backends.base.base import BaseDatabaseWrapper
+
+    try:
+        real_execute = CursorWrapper.execute
+        real_executemany = CursorWrapper.executemany
+        real_connect = BaseDatabaseWrapper.connect
+    except AttributeError:
+        # This won't work on Django versions < 1.6
+        return
+
+    @ensure_integration_enabled(DjangoIntegration, real_execute)
+    def execute(self, sql, params=None):
+        # type: (CursorWrapper, Any, Optional[Any]) -> Any
+        with record_sql_queries(
+            cursor=self.cursor,
+            query=sql,
+            params_list=params,
+            paramstyle="format",
+            executemany=False,
+            span_origin=DjangoIntegration.origin_db,
+        ) as span:
+            _set_db_data(span, self)
+            result = real_execute(self, sql, params)
+
+        with capture_internal_exceptions():
+            add_query_source(span)
+
+        return result
+
+    @ensure_integration_enabled(DjangoIntegration, real_executemany)
+    def executemany(self, sql, param_list):
+        # type: (CursorWrapper, Any, List[Any]) -> Any
+        with record_sql_queries(
+            cursor=self.cursor,
+            query=sql,
+            params_list=param_list,
+            paramstyle="format",
+            executemany=True,
+            span_origin=DjangoIntegration.origin_db,
+        ) as span:
+            _set_db_data(span, self)
+
+            result = real_executemany(self, sql, param_list)
+
+        with capture_internal_exceptions():
+            add_query_source(span)
+
+        return result
+
+    @ensure_integration_enabled(DjangoIntegration, real_connect)
+    def connect(self):
+        # type: (BaseDatabaseWrapper) -> None
+        with capture_internal_exceptions():
+            sentry_sdk.add_breadcrumb(message="connect", category="query")
+
+        with sentry_sdk.start_span(
+            op=OP.DB,
+            name="connect",
+            origin=DjangoIntegration.origin_db,
+        ) as span:
+            _set_db_data(span, self)
+            return real_connect(self)
+
+    CursorWrapper.execute = execute
+    CursorWrapper.executemany = executemany
+    BaseDatabaseWrapper.connect = connect
+    ignore_logger("django.db.backends")
+
+
+def _set_db_data(span, cursor_or_db):
+    # type: (Span, Any) -> None
+    db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db
+    vendor = db.vendor
+    span.set_data(SPANDATA.DB_SYSTEM, vendor)
+
+    # Some custom backends override `__getattr__`, making it look like `cursor_or_db`
+    # actually has a `connection` and the `connection` has a `get_dsn_parameters`
+    # attribute, only to throw an error once you actually want to call it.
+    # Hence the `inspect` check whether `get_dsn_parameters` is an actual callable
+    # function.
+    is_psycopg2 = (
+        hasattr(cursor_or_db, "connection")
+        and hasattr(cursor_or_db.connection, "get_dsn_parameters")
+        and inspect.isroutine(cursor_or_db.connection.get_dsn_parameters)
+    )
+    if is_psycopg2:
+        connection_params = cursor_or_db.connection.get_dsn_parameters()
+    else:
+        try:
+            # psycopg3, only extract needed params as get_parameters
+            # can be slow because of the additional logic to filter out default
+            # values
+            connection_params = {
+                "dbname": cursor_or_db.connection.info.dbname,
+                "port": cursor_or_db.connection.info.port,
+            }
+            # PGhost returns host or base dir of UNIX socket as an absolute path
+            # starting with /, use it only when it contains host
+            pg_host = cursor_or_db.connection.info.host
+            if pg_host and not pg_host.startswith("/"):
+                connection_params["host"] = pg_host
+        except Exception:
+            connection_params = db.get_connection_params()
+
+    db_name = connection_params.get("dbname") or connection_params.get("database")
+    if db_name is not None:
+        span.set_data(SPANDATA.DB_NAME, db_name)
+
+    server_address = connection_params.get("host")
+    if server_address is not None:
+        span.set_data(SPANDATA.SERVER_ADDRESS, server_address)
+
+    server_port = connection_params.get("port")
+    if server_port is not None:
+        span.set_data(SPANDATA.SERVER_PORT, str(server_port))
+
+    server_socket_address = connection_params.get("unix_socket")
+    if server_socket_address is not None:
+        span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address)
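Tying the options above together, a minimal configuration sketch (placeholder DSN; typically placed in settings.py before the application starts serving requests). The constructor arguments shown are the ones defined in DjangoIntegration.__init__ above:

import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

sentry_sdk.init(
    dsn="https://<key>@<org>.ingest.sentry.io/<project>",  # placeholder DSN
    integrations=[
        DjangoIntegration(
            transaction_style="url",          # or "function_name"
            middleware_spans=True,
            signals_spans=True,
            cache_spans=False,
            http_methods_to_capture=("GET", "POST"),  # example subset
        )
    ],
    traces_sample_rate=1.0,
)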
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/asgi.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/asgi.py
new file mode 100644
index 00000000..73a25acc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/asgi.py
@@ -0,0 +1,245 @@
+"""
+Instrumentation for Django 3.0
+
+Since this file contains `async def` it is conditionally imported in
+`sentry_sdk.integrations.django` (depending on the existence of
+`django.core.handlers.asgi`).
+"""
+
+import asyncio
+import functools
+import inspect
+
+from django.core.handlers.wsgi import WSGIRequest
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Union, TypeVar
+
+    from django.core.handlers.asgi import ASGIRequest
+    from django.http.response import HttpResponse
+
+    from sentry_sdk._types import Event, EventProcessor
+
+    _F = TypeVar("_F", bound=Callable[..., Any])
+
+
+# Python 3.12 deprecates asyncio.iscoroutinefunction() as an alias for
+# inspect.iscoroutinefunction(), whilst also removing the _is_coroutine marker.
+# The latter is replaced with the inspect.markcoroutinefunction decorator.
+# Until 3.12 is the minimum supported Python version, provide a shim.
+# This was copied from https://github.com/django/asgiref/blob/main/asgiref/sync.py
+if hasattr(inspect, "markcoroutinefunction"):
+    iscoroutinefunction = inspect.iscoroutinefunction
+    markcoroutinefunction = inspect.markcoroutinefunction
+else:
+    iscoroutinefunction = asyncio.iscoroutinefunction  # type: ignore[assignment]
+
+    def markcoroutinefunction(func: "_F") -> "_F":
+        func._is_coroutine = asyncio.coroutines._is_coroutine  # type: ignore
+        return func
+
+
+def _make_asgi_request_event_processor(request):
+    # type: (ASGIRequest) -> EventProcessor
+    def asgi_request_event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        # if the request is gone we are fine not logging the data from
+        # it.  This might happen if the processor is pushed away to
+        # another thread.
+        from sentry_sdk.integrations.django import (
+            DjangoRequestExtractor,
+            _set_user_info,
+        )
+
+        if request is None:
+            return event
+
+        if type(request) == WSGIRequest:
+            return event
+
+        with capture_internal_exceptions():
+            DjangoRequestExtractor(request).extract_into_event(event)
+
+        if should_send_default_pii():
+            with capture_internal_exceptions():
+                _set_user_info(request, event)
+
+        return event
+
+    return asgi_request_event_processor
+
+
+def patch_django_asgi_handler_impl(cls):
+    # type: (Any) -> None
+
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    old_app = cls.__call__
+
+    async def sentry_patched_asgi_handler(self, scope, receive, send):
+        # type: (Any, Any, Any, Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+        if integration is None:
+            return await old_app(self, scope, receive, send)
+
+        middleware = SentryAsgiMiddleware(
+            old_app.__get__(self, cls),
+            unsafe_context_data=True,
+            span_origin=DjangoIntegration.origin,
+            http_methods_to_capture=integration.http_methods_to_capture,
+        )._run_asgi3
+
+        return await middleware(scope, receive, send)
+
+    cls.__call__ = sentry_patched_asgi_handler
+
+    modern_django_asgi_support = hasattr(cls, "create_request")
+    if modern_django_asgi_support:
+        old_create_request = cls.create_request
+
+        @ensure_integration_enabled(DjangoIntegration, old_create_request)
+        def sentry_patched_create_request(self, *args, **kwargs):
+            # type: (Any, *Any, **Any) -> Any
+            request, error_response = old_create_request(self, *args, **kwargs)
+            scope = sentry_sdk.get_isolation_scope()
+            scope.add_event_processor(_make_asgi_request_event_processor(request))
+
+            return request, error_response
+
+        cls.create_request = sentry_patched_create_request
+
+
+def patch_get_response_async(cls, _before_get_response):
+    # type: (Any, Any) -> None
+    old_get_response_async = cls.get_response_async
+
+    async def sentry_patched_get_response_async(self, request):
+        # type: (Any, Any) -> Union[HttpResponse, BaseException]
+        _before_get_response(request)
+        return await old_get_response_async(self, request)
+
+    cls.get_response_async = sentry_patched_get_response_async
+
+
+def patch_channels_asgi_handler_impl(cls):
+    # type: (Any) -> None
+    import channels  # type: ignore
+
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    if channels.__version__ < "3.0.0":
+        old_app = cls.__call__
+
+        async def sentry_patched_asgi_handler(self, receive, send):
+            # type: (Any, Any, Any) -> Any
+            integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+            if integration is None:
+                return await old_app(self, receive, send)
+
+            middleware = SentryAsgiMiddleware(
+                lambda _scope: old_app.__get__(self, cls),
+                unsafe_context_data=True,
+                span_origin=DjangoIntegration.origin,
+                http_methods_to_capture=integration.http_methods_to_capture,
+            )
+
+            return await middleware(self.scope)(receive, send)
+
+        cls.__call__ = sentry_patched_asgi_handler
+
+    else:
+        # The ASGI handler in Channels >= 3 has the same signature as
+        # the Django handler.
+        patch_django_asgi_handler_impl(cls)
+
+
+def wrap_async_view(callback):
+    # type: (Any) -> Any
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    @functools.wraps(callback)
+    async def sentry_wrapped_callback(request, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        current_scope = sentry_sdk.get_current_scope()
+        if current_scope.transaction is not None:
+            current_scope.transaction.update_active_thread()
+
+        sentry_scope = sentry_sdk.get_isolation_scope()
+        if sentry_scope.profile is not None:
+            sentry_scope.profile.update_active_thread_id()
+
+        with sentry_sdk.start_span(
+            op=OP.VIEW_RENDER,
+            name=request.resolver_match.view_name,
+            origin=DjangoIntegration.origin,
+        ):
+            return await callback(request, *args, **kwargs)
+
+    return sentry_wrapped_callback
+
+
+def _asgi_middleware_mixin_factory(_check_middleware_span):
+    # type: (Callable[..., Any]) -> Any
+    """
+    Mixin class factory that generates a middleware mixin for handling requests
+    in async mode.
+    """
+
+    class SentryASGIMixin:
+        if TYPE_CHECKING:
+            _inner = None
+
+        def __init__(self, get_response):
+            # type: (Callable[..., Any]) -> None
+            self.get_response = get_response
+            self._acall_method = None
+            self._async_check()
+
+        def _async_check(self):
+            # type: () -> None
+            """
+            If get_response is a coroutine function, turns us into async mode so
+            a thread is not consumed during a whole request.
+            Taken from django.utils.deprecation::MiddlewareMixin._async_check
+            """
+            if iscoroutinefunction(self.get_response):
+                markcoroutinefunction(self)
+
+        def async_route_check(self):
+            # type: () -> bool
+            """
+            Function that checks if we are in async mode,
+            and if we are forwards the handling of requests to __acall__
+            """
+            return iscoroutinefunction(self.get_response)
+
+        async def __acall__(self, *args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            f = self._acall_method
+            if f is None:
+                if hasattr(self._inner, "__acall__"):
+                    self._acall_method = f = self._inner.__acall__  # type: ignore
+                else:
+                    self._acall_method = f = self._inner
+
+            middleware_span = _check_middleware_span(old_method=f)
+
+            if middleware_span is None:
+                return await f(*args, **kwargs)
+
+            with middleware_span:
+                return await f(*args, **kwargs)
+
+    return SentryASGIMixin
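For context, the patching above wraps Django's ASGI handler in SentryAsgiMiddleware at call time, so a project only needs to initialize the SDK with the Django integration; the standard asgi.py application is then wrapped automatically. A minimal sketch, assuming the usual settings.py placement (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[DjangoIntegration()],
        traces_sample_rate=1.0,  # record transactions so the spans above are emitted
    )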
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/caching.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/caching.py
new file mode 100644
index 00000000..79856117
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/caching.py
@@ -0,0 +1,191 @@
+import functools
+from typing import TYPE_CHECKING
+from sentry_sdk.integrations.redis.utils import _get_safe_key, _key_as_string
+from urllib3.util import parse_url as urlparse
+
+from django import VERSION as DJANGO_VERSION
+from django.core.cache import CacheHandler
+
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+)
+
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+    from typing import Optional
+
+
+METHODS_TO_INSTRUMENT = [
+    "set",
+    "set_many",
+    "get",
+    "get_many",
+]
+
+
+def _get_span_description(method_name, args, kwargs):
+    # type: (str, tuple[Any, ...], dict[str, Any]) -> str
+    return _key_as_string(_get_safe_key(method_name, args, kwargs))
+
+
+def _patch_cache_method(cache, method_name, address, port):
+    # type: (CacheHandler, str, Optional[str], Optional[int]) -> None
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    original_method = getattr(cache, method_name)
+
+    @ensure_integration_enabled(DjangoIntegration, original_method)
+    def _instrument_call(
+        cache, method_name, original_method, args, kwargs, address, port
+    ):
+        # type: (CacheHandler, str, Callable[..., Any], tuple[Any, ...], dict[str, Any], Optional[str], Optional[int]) -> Any
+        is_set_operation = method_name.startswith("set")
+        is_get_operation = not is_set_operation
+
+        op = OP.CACHE_PUT if is_set_operation else OP.CACHE_GET
+        description = _get_span_description(method_name, args, kwargs)
+
+        with sentry_sdk.start_span(
+            op=op,
+            name=description,
+            origin=DjangoIntegration.origin,
+        ) as span:
+            value = original_method(*args, **kwargs)
+
+            with capture_internal_exceptions():
+                if address is not None:
+                    span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, address)
+
+                if port is not None:
+                    span.set_data(SPANDATA.NETWORK_PEER_PORT, port)
+
+                key = _get_safe_key(method_name, args, kwargs)
+                if key is not None:
+                    span.set_data(SPANDATA.CACHE_KEY, key)
+
+                item_size = None
+                if is_get_operation:
+                    if value:
+                        item_size = len(str(value))
+                        span.set_data(SPANDATA.CACHE_HIT, True)
+                    else:
+                        span.set_data(SPANDATA.CACHE_HIT, False)
+                else:  # TODO: We don't handle `get_or_set` which we should
+                    arg_count = len(args)
+                    if arg_count >= 2:
+                        # 'set' command
+                        item_size = len(str(args[1]))
+                    elif arg_count == 1:
+                        # 'set_many' command
+                        item_size = len(str(args[0]))
+
+                if item_size is not None:
+                    span.set_data(SPANDATA.CACHE_ITEM_SIZE, item_size)
+
+            return value
+
+    @functools.wraps(original_method)
+    def sentry_method(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        return _instrument_call(
+            cache, method_name, original_method, args, kwargs, address, port
+        )
+
+    setattr(cache, method_name, sentry_method)
+
+
+def _patch_cache(cache, address=None, port=None):
+    # type: (CacheHandler, Optional[str], Optional[int]) -> None
+    if not hasattr(cache, "_sentry_patched"):
+        for method_name in METHODS_TO_INSTRUMENT:
+            _patch_cache_method(cache, method_name, address, port)
+        cache._sentry_patched = True
+
+
+def _get_address_port(settings):
+    # type: (dict[str, Any]) -> tuple[Optional[str], Optional[int]]
+    location = settings.get("LOCATION")
+
+    # TODO: location can also be an array of locations
+    #       see: https://docs.djangoproject.com/en/5.0/topics/cache/#redis
+    #       GitHub issue: https://github.com/getsentry/sentry-python/issues/3062
+    if not isinstance(location, str):
+        return None, None
+
+    if "://" in location:
+        parsed_url = urlparse(location)
+        # Remove the username and password from the URL to not leak sensitive data.
+        address = "{}://{}{}".format(
+            parsed_url.scheme or "",
+            parsed_url.hostname or "",
+            parsed_url.path or "",
+        )
+        port = parsed_url.port
+    else:
+        address = location
+        port = None
+
+    return address, int(port) if port is not None else None
+
+
+def should_enable_cache_spans():
+    # type: () -> bool
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    client = sentry_sdk.get_client()
+    integration = client.get_integration(DjangoIntegration)
+    from django.conf import settings
+
+    return integration is not None and (
+        (client.spotlight is not None and settings.DEBUG is True)
+        or integration.cache_spans is True
+    )
+
+
+def patch_caching():
+    # type: () -> None
+    if not hasattr(CacheHandler, "_sentry_patched"):
+        if DJANGO_VERSION < (3, 2):
+            original_get_item = CacheHandler.__getitem__
+
+            @functools.wraps(original_get_item)
+            def sentry_get_item(self, alias):
+                # type: (CacheHandler, str) -> Any
+                cache = original_get_item(self, alias)
+
+                if should_enable_cache_spans():
+                    from django.conf import settings
+
+                    address, port = _get_address_port(
+                        settings.CACHES[alias or "default"]
+                    )
+
+                    _patch_cache(cache, address, port)
+
+                return cache
+
+            CacheHandler.__getitem__ = sentry_get_item
+            CacheHandler._sentry_patched = True
+
+        else:
+            original_create_connection = CacheHandler.create_connection
+
+            @functools.wraps(original_create_connection)
+            def sentry_create_connection(self, alias):
+                # type: (CacheHandler, str) -> Any
+                cache = original_create_connection(self, alias)
+
+                if should_enable_cache_spans():
+                    address, port = _get_address_port(self.settings[alias or "default"])
+
+                    _patch_cache(cache, address, port)
+
+                return cache
+
+            CacheHandler.create_connection = sentry_create_connection
+            CacheHandler._sentry_patched = True
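Cache spans are gated by should_enable_cache_spans() above, i.e. they are only recorded when the integration's cache_spans flag is set (or Spotlight is active with DEBUG). A minimal sketch of turning them on (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[DjangoIntegration(cache_spans=True)],
        traces_sample_rate=1.0,
    )

    # django.core.cache calls such as cache.get("key") or cache.set("key", value)
    # then go through _instrument_call() above and emit cache.get / cache.put spans.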
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/middleware.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/middleware.py
new file mode 100644
index 00000000..24527656
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/middleware.py
@@ -0,0 +1,187 @@
+"""
+Create spans from Django middleware invocations
+"""
+
+from functools import wraps
+
+from django import VERSION as DJANGO_VERSION
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.utils import (
+    ContextVar,
+    transaction_from_function,
+    capture_internal_exceptions,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+    from typing import Optional
+    from typing import TypeVar
+
+    from sentry_sdk.tracing import Span
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+_import_string_should_wrap_middleware = ContextVar(
+    "import_string_should_wrap_middleware"
+)
+
+DJANGO_SUPPORTS_ASYNC_MIDDLEWARE = DJANGO_VERSION >= (3, 1)
+
+if not DJANGO_SUPPORTS_ASYNC_MIDDLEWARE:
+    _asgi_middleware_mixin_factory = lambda _: object
+else:
+    from .asgi import _asgi_middleware_mixin_factory
+
+
+def patch_django_middlewares():
+    # type: () -> None
+    from django.core.handlers import base
+
+    old_import_string = base.import_string
+
+    def sentry_patched_import_string(dotted_path):
+        # type: (str) -> Any
+        rv = old_import_string(dotted_path)
+
+        if _import_string_should_wrap_middleware.get(None):
+            rv = _wrap_middleware(rv, dotted_path)
+
+        return rv
+
+    base.import_string = sentry_patched_import_string
+
+    old_load_middleware = base.BaseHandler.load_middleware
+
+    def sentry_patched_load_middleware(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        _import_string_should_wrap_middleware.set(True)
+        try:
+            return old_load_middleware(*args, **kwargs)
+        finally:
+            _import_string_should_wrap_middleware.set(False)
+
+    base.BaseHandler.load_middleware = sentry_patched_load_middleware
+
+
+def _wrap_middleware(middleware, middleware_name):
+    # type: (Any, str) -> Any
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    def _check_middleware_span(old_method):
+        # type: (Callable[..., Any]) -> Optional[Span]
+        integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+        if integration is None or not integration.middleware_spans:
+            return None
+
+        function_name = transaction_from_function(old_method)
+
+        description = middleware_name
+        function_basename = getattr(old_method, "__name__", None)
+        if function_basename:
+            description = "{}.{}".format(description, function_basename)
+
+        middleware_span = sentry_sdk.start_span(
+            op=OP.MIDDLEWARE_DJANGO,
+            name=description,
+            origin=DjangoIntegration.origin,
+        )
+        middleware_span.set_tag("django.function_name", function_name)
+        middleware_span.set_tag("django.middleware_name", middleware_name)
+
+        return middleware_span
+
+    def _get_wrapped_method(old_method):
+        # type: (F) -> F
+        with capture_internal_exceptions():
+
+            def sentry_wrapped_method(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                middleware_span = _check_middleware_span(old_method)
+
+                if middleware_span is None:
+                    return old_method(*args, **kwargs)
+
+                with middleware_span:
+                    return old_method(*args, **kwargs)
+
+            try:
+                # fails for __call__ of function on Python 2 (see py2.7-django-1.11)
+                sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method)
+
+                # Necessary for Django 3.1
+                sentry_wrapped_method.__self__ = old_method.__self__  # type: ignore
+            except Exception:
+                pass
+
+            return sentry_wrapped_method  # type: ignore
+
+        return old_method
+
+    class SentryWrappingMiddleware(
+        _asgi_middleware_mixin_factory(_check_middleware_span)  # type: ignore
+    ):
+        sync_capable = getattr(middleware, "sync_capable", True)
+        async_capable = DJANGO_SUPPORTS_ASYNC_MIDDLEWARE and getattr(
+            middleware, "async_capable", False
+        )
+
+        def __init__(self, get_response=None, *args, **kwargs):
+            # type: (Optional[Callable[..., Any]], *Any, **Any) -> None
+            if get_response:
+                self._inner = middleware(get_response, *args, **kwargs)
+            else:
+                self._inner = middleware(*args, **kwargs)
+            self.get_response = get_response
+            self._call_method = None
+            if self.async_capable:
+                super().__init__(get_response)
+
+        # We need correct behavior for `hasattr()`, which we can only determine
+        # when we have an instance of the middleware we're wrapping.
+        def __getattr__(self, method_name):
+            # type: (str) -> Any
+            if method_name not in (
+                "process_request",
+                "process_view",
+                "process_template_response",
+                "process_response",
+                "process_exception",
+            ):
+                raise AttributeError()
+
+            old_method = getattr(self._inner, method_name)
+            rv = _get_wrapped_method(old_method)
+            self.__dict__[method_name] = rv
+            return rv
+
+        def __call__(self, *args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            if hasattr(self, "async_route_check") and self.async_route_check():
+                return self.__acall__(*args, **kwargs)
+
+            f = self._call_method
+            if f is None:
+                self._call_method = f = self._inner.__call__
+
+            middleware_span = _check_middleware_span(old_method=f)
+
+            if middleware_span is None:
+                return f(*args, **kwargs)
+
+            with middleware_span:
+                return f(*args, **kwargs)
+
+    for attr in (
+        "__name__",
+        "__module__",
+        "__qualname__",
+    ):
+        if hasattr(middleware, attr):
+            setattr(SentryWrappingMiddleware, attr, getattr(middleware, attr))
+
+    return SentryWrappingMiddleware
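Middleware spans are controlled by the integration's middleware_spans flag checked in _check_middleware_span() above. A sketch of disabling them while keeping the rest of the integration (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[DjangoIntegration(middleware_spans=False)],
    )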
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/signals_handlers.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/signals_handlers.py
new file mode 100644
index 00000000..cb0f8b9d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/signals_handlers.py
@@ -0,0 +1,91 @@
+from functools import wraps
+
+from django.dispatch import Signal
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.django import DJANGO_VERSION
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Union
+
+
+def _get_receiver_name(receiver):
+    # type: (Callable[..., Any]) -> str
+    name = ""
+
+    if hasattr(receiver, "__qualname__"):
+        name = receiver.__qualname__
+    elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
+        name = receiver.__name__
+    elif hasattr(receiver, "func"):  # certain callables (like partials) don't have a name
+        if hasattr(receiver.func, "__name__"):
+            name = "partial(<function " + receiver.func.__name__ + ">)"
+
+    if name == "":
+        # In case nothing was found, return the string representation (this is the slowest case).
+        return str(receiver)
+
+    if hasattr(receiver, "__module__"):  # prepend with module, if there is one
+        name = receiver.__module__ + "." + name
+
+    return name
+
+
+def patch_signals():
+    # type: () -> None
+    """
+    Patch django signal receivers to create a span.
+
+    This only wraps sync receivers. Django>=5.0 introduced async receivers, but
+    since we don't create transactions for ASGI Django, we don't wrap them.
+    """
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    old_live_receivers = Signal._live_receivers
+
+    def _sentry_live_receivers(self, sender):
+        # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]]
+        if DJANGO_VERSION >= (5, 0):
+            sync_receivers, async_receivers = old_live_receivers(self, sender)
+        else:
+            sync_receivers = old_live_receivers(self, sender)
+            async_receivers = []
+
+        def sentry_sync_receiver_wrapper(receiver):
+            # type: (Callable[..., Any]) -> Callable[..., Any]
+            @wraps(receiver)
+            def wrapper(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                signal_name = _get_receiver_name(receiver)
+                with sentry_sdk.start_span(
+                    op=OP.EVENT_DJANGO,
+                    name=signal_name,
+                    origin=DjangoIntegration.origin,
+                ) as span:
+                    span.set_data("signal", signal_name)
+                    return receiver(*args, **kwargs)
+
+            return wrapper
+
+        integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+        if (
+            integration
+            and integration.signals_spans
+            and self not in integration.signals_denylist
+        ):
+            for idx, receiver in enumerate(sync_receivers):
+                sync_receivers[idx] = sentry_sync_receiver_wrapper(receiver)
+
+        if DJANGO_VERSION >= (5, 0):
+            return sync_receivers, async_receivers
+        else:
+            return sync_receivers
+
+    Signal._live_receivers = _sentry_live_receivers
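Signal spans are likewise gated on the integration's signals_spans flag, and individual signals can be excluded via signals_denylist (both checked right above). A sketch (the DSN is a placeholder):

    import sentry_sdk
    from django.core.signals import request_finished
    from sentry_sdk.integrations.django import DjangoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[
            DjangoIntegration(
                signals_spans=True,
                signals_denylist=[request_finished],  # skip receivers of this signal
            )
        ],
    )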
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/templates.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/templates.py
new file mode 100644
index 00000000..10e8a924
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/templates.py
@@ -0,0 +1,188 @@
+import functools
+
+from django.template import TemplateSyntaxError
+from django.utils.safestring import mark_safe
+from django import VERSION as DJANGO_VERSION
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.utils import ensure_integration_enabled
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+    from typing import Iterator
+    from typing import Tuple
+
+try:
+    # support Django 1.9
+    from django.template.base import Origin
+except ImportError:
+    # backward compatibility
+    from django.template.loader import LoaderOrigin as Origin
+
+
+def get_template_frame_from_exception(exc_value):
+    # type: (Optional[BaseException]) -> Optional[Dict[str, Any]]
+
+    # As of Django 1.9, exceptions carry template debug information in ``template_debug``.
+    if hasattr(exc_value, "template_debug"):
+        return _get_template_frame_from_debug(exc_value.template_debug)  # type: ignore
+
+    # As of r16833 (Django) all exceptions may contain a
+    # ``django_template_source`` attribute (rather than the legacy
+    # ``TemplateSyntaxError.source`` check)
+    if hasattr(exc_value, "django_template_source"):
+        return _get_template_frame_from_source(
+            exc_value.django_template_source  # type: ignore
+        )
+
+    if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
+        source = exc_value.source
+        if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
+            return _get_template_frame_from_source(source)  # type: ignore
+
+    return None
+
+
+def _get_template_name_description(template_name):
+    # type: (str) -> str
+    if isinstance(template_name, (list, tuple)):
+        if template_name:
+            return "[{}, ...]".format(template_name[0])
+    else:
+        return template_name
+
+
+def patch_templates():
+    # type: () -> None
+    from django.template.response import SimpleTemplateResponse
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    real_rendered_content = SimpleTemplateResponse.rendered_content
+
+    @property  # type: ignore
+    @ensure_integration_enabled(DjangoIntegration, real_rendered_content.fget)
+    def rendered_content(self):
+        # type: (SimpleTemplateResponse) -> str
+        with sentry_sdk.start_span(
+            op=OP.TEMPLATE_RENDER,
+            name=_get_template_name_description(self.template_name),
+            origin=DjangoIntegration.origin,
+        ) as span:
+            span.set_data("context", self.context_data)
+            return real_rendered_content.fget(self)
+
+    SimpleTemplateResponse.rendered_content = rendered_content
+
+    if DJANGO_VERSION < (1, 7):
+        return
+    import django.shortcuts
+
+    real_render = django.shortcuts.render
+
+    @functools.wraps(real_render)
+    @ensure_integration_enabled(DjangoIntegration, real_render)
+    def render(request, template_name, context=None, *args, **kwargs):
+        # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse
+
+        # Inject trace meta tags into template context
+        context = context or {}
+        if "sentry_trace_meta" not in context:
+            context["sentry_trace_meta"] = mark_safe(
+                sentry_sdk.get_current_scope().trace_propagation_meta()
+            )
+
+        with sentry_sdk.start_span(
+            op=OP.TEMPLATE_RENDER,
+            name=_get_template_name_description(template_name),
+            origin=DjangoIntegration.origin,
+        ) as span:
+            span.set_data("context", context)
+            return real_render(request, template_name, context, *args, **kwargs)
+
+    django.shortcuts.render = render
+
+
+def _get_template_frame_from_debug(debug):
+    # type: (Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]
+    if debug is None:
+        return None
+
+    lineno = debug["line"]
+    filename = debug["name"]
+    if filename is None:
+        filename = "<django template>"
+
+    pre_context = []
+    post_context = []
+    context_line = None
+
+    for i, line in debug["source_lines"]:
+        if i < lineno:
+            pre_context.append(line)
+        elif i > lineno:
+            post_context.append(line)
+        else:
+            context_line = line
+
+    return {
+        "filename": filename,
+        "lineno": lineno,
+        "pre_context": pre_context[-5:],
+        "post_context": post_context[:5],
+        "context_line": context_line,
+        "in_app": True,
+    }
+
+
+def _linebreak_iter(template_source):
+    # type: (str) -> Iterator[int]
+    yield 0
+    p = template_source.find("\n")
+    while p >= 0:
+        yield p + 1
+        p = template_source.find("\n", p + 1)
+
+
+def _get_template_frame_from_source(source):
+    # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]]
+    if not source:
+        return None
+
+    origin, (start, end) = source
+    filename = getattr(origin, "loadname", None)
+    if filename is None:
+        filename = "<django template>"
+    template_source = origin.reload()
+    lineno = None
+    upto = 0
+    pre_context = []
+    post_context = []
+    context_line = None
+
+    for num, next_linebreak in enumerate(_linebreak_iter(template_source)):
+        line = template_source[upto:next_linebreak]
+        if start >= upto and end <= next_linebreak:
+            lineno = num
+            context_line = line
+        elif lineno is None:
+            pre_context.append(line)
+        else:
+            post_context.append(line)
+
+        upto = next_linebreak
+
+    if context_line is None or lineno is None:
+        return None
+
+    return {
+        "filename": filename,
+        "lineno": lineno,
+        "pre_context": pre_context[-5:],
+        "post_context": post_context[:5],
+        "context_line": context_line,
+    }
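Since the patched django.shortcuts.render() above injects a sentry_trace_meta context variable, any view that goes through it can expose the trace propagation meta tags to the browser simply by printing {{ sentry_trace_meta }} in the template's head. A sketch of such a view (file and template names are illustrative):

    # views.py -- illustrative; the template "home.html" can include
    # {{ sentry_trace_meta }} inside <head> to emit the propagation meta tags
    # injected by the patched render() above.
    from django.shortcuts import render

    def home(request):
        return render(request, "home.html", {"title": "Hello"})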
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/transactions.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/transactions.py
new file mode 100644
index 00000000..5a7d69f3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/transactions.py
@@ -0,0 +1,159 @@
+"""
+Copied from raven-python.
+
+Despite being called "legacy" in some places this resolver is very much still
+in use.
+"""
+
+import re
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from django.urls.resolvers import URLResolver
+    from typing import Dict
+    from typing import List
+    from typing import Optional
+    from django.urls.resolvers import URLPattern
+    from typing import Tuple
+    from typing import Union
+    from re import Pattern
+
+from django import VERSION as DJANGO_VERSION
+
+if DJANGO_VERSION >= (2, 0):
+    from django.urls.resolvers import RoutePattern
+else:
+    RoutePattern = None
+
+try:
+    from django.urls import get_resolver
+except ImportError:
+    from django.core.urlresolvers import get_resolver
+
+
+def get_regex(resolver_or_pattern):
+    # type: (Union[URLPattern, URLResolver]) -> Pattern[str]
+    """Utility method for django's deprecated resolver.regex"""
+    try:
+        regex = resolver_or_pattern.regex
+    except AttributeError:
+        regex = resolver_or_pattern.pattern.regex
+    return regex
+
+
+class RavenResolver:
+    _new_style_group_matcher = re.compile(
+        r"<(?:([^>:]+):)?([^>]+)>"
+    )  # https://github.com/django/django/blob/21382e2743d06efbf5623e7c9b6dccf2a325669b/django/urls/resolvers.py#L245-L247
+    _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
+    _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
+    _non_named_group_matcher = re.compile(r"\([^\)]+\)")
+    # [foo|bar|baz]
+    _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
+    _camel_re = re.compile(r"([A-Z]+)([a-z])")
+
+    _cache = {}  # type: Dict[URLPattern, str]
+
+    def _simplify(self, pattern):
+        # type: (Union[URLPattern, URLResolver]) -> str
+        r"""
+        Clean up urlpattern regexes into something readable by humans:
+
+        From:
+        > "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
+
+        To:
+        > "{sport_slug}/athletes/{athlete_slug}/"
+        """
+        # "new-style" path patterns can be parsed directly without turning them
+        # into regexes first
+        if (
+            RoutePattern is not None
+            and hasattr(pattern, "pattern")
+            and isinstance(pattern.pattern, RoutePattern)
+        ):
+            return self._new_style_group_matcher.sub(
+                lambda m: "{%s}" % m.group(2), str(pattern.pattern._route)
+            )
+
+        result = get_regex(pattern).pattern
+
+        # remove optional params
+        # TODO(dcramer): it'd be nice to change these into [%s] but it currently
+        # conflicts with the other rules because we're doing regexp matches
+        # rather than parsing tokens
+        result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), result)
+
+        # handle named groups first
+        result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
+
+        # handle non-named groups
+        result = self._non_named_group_matcher.sub("{var}", result)
+
+        # collapse either/or options like [foo|bar] to the first alternative
+        result = self._either_option_matcher.sub(lambda m: m.group(1), result)
+
+        # clean up any outstanding regex-y characters.
+        result = (
+            result.replace("^", "")
+            .replace("$", "")
+            .replace("?", "")
+            .replace("\\A", "")
+            .replace("\\Z", "")
+            .replace("//", "/")
+            .replace("\\", "")
+        )
+
+        return result
+
+    def _resolve(self, resolver, path, parents=None):
+        # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
+
+        match = get_regex(resolver).search(path)  # Django < 2.0
+
+        if not match:
+            return None
+
+        if parents is None:
+            parents = [resolver]
+        elif resolver not in parents:
+            parents = parents + [resolver]
+
+        new_path = path[match.end() :]
+        for pattern in resolver.url_patterns:
+            # this is an include()
+            if not pattern.callback:
+                match_ = self._resolve(pattern, new_path, parents)
+                if match_:
+                    return match_
+                continue
+            elif not get_regex(pattern).search(new_path):
+                continue
+
+            try:
+                return self._cache[pattern]
+            except KeyError:
+                pass
+
+            prefix = "".join(self._simplify(p) for p in parents)
+            result = prefix + self._simplify(pattern)
+            if not result.startswith("/"):
+                result = "/" + result
+            self._cache[pattern] = result
+            return result
+
+        return None
+
+    def resolve(
+        self,
+        path,  # type: str
+        urlconf=None,  # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
+    ):
+        # type: (...) -> Optional[str]
+        resolver = get_resolver(urlconf)
+        match = self._resolve(resolver, path)
+        return match
+
+
+LEGACY_RESOLVER = RavenResolver()
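To make the resolver's output concrete: given a urlconf entry matching the regex from the _simplify() docstring, resolve() returns the cleaned-up pattern that Sentry uses as the transaction name. A hypothetical illustration, assuming a configured Django process:

    # Assuming the urlconf contains something like:
    #     re_path(r"^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$", views.athlete)
    from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER

    name = LEGACY_RESOLVER.resolve("/football/athletes/10/")
    # -> "/{sport_slug}/athletes/{athlete_slug}/"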
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/views.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/views.py
new file mode 100644
index 00000000..0a9861a6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/django/views.py
@@ -0,0 +1,96 @@
+import functools
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+
+
+try:
+    from asyncio import iscoroutinefunction
+except ImportError:
+    iscoroutinefunction = None  # type: ignore
+
+
+try:
+    from sentry_sdk.integrations.django.asgi import wrap_async_view
+except (ImportError, SyntaxError):
+    wrap_async_view = None  # type: ignore
+
+
+def patch_views():
+    # type: () -> None
+
+    from django.core.handlers.base import BaseHandler
+    from django.template.response import SimpleTemplateResponse
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    old_make_view_atomic = BaseHandler.make_view_atomic
+    old_render = SimpleTemplateResponse.render
+
+    def sentry_patched_render(self):
+        # type: (SimpleTemplateResponse) -> Any
+        with sentry_sdk.start_span(
+            op=OP.VIEW_RESPONSE_RENDER,
+            name="serialize response",
+            origin=DjangoIntegration.origin,
+        ):
+            return old_render(self)
+
+    @functools.wraps(old_make_view_atomic)
+    def sentry_patched_make_view_atomic(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        callback = old_make_view_atomic(self, *args, **kwargs)
+
+        # XXX: The wrapper function is created for every request. Find a more
+        # efficient way to wrap views (or build a cache?)
+
+        integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+        if integration is not None and integration.middleware_spans:
+            is_async_view = (
+                iscoroutinefunction is not None
+                and wrap_async_view is not None
+                and iscoroutinefunction(callback)
+            )
+            if is_async_view:
+                sentry_wrapped_callback = wrap_async_view(callback)
+            else:
+                sentry_wrapped_callback = _wrap_sync_view(callback)
+
+        else:
+            sentry_wrapped_callback = callback
+
+        return sentry_wrapped_callback
+
+    SimpleTemplateResponse.render = sentry_patched_render
+    BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
+
+
+def _wrap_sync_view(callback):
+    # type: (Any) -> Any
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    @functools.wraps(callback)
+    def sentry_wrapped_callback(request, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        current_scope = sentry_sdk.get_current_scope()
+        if current_scope.transaction is not None:
+            current_scope.transaction.update_active_thread()
+
+        sentry_scope = sentry_sdk.get_isolation_scope()
+        # Set the active thread id to the handler thread for sync views.
+        # This isn't necessary for async views since those run on the main thread.
+        if sentry_scope.profile is not None:
+            sentry_scope.profile.update_active_thread_id()
+
+        with sentry_sdk.start_span(
+            op=OP.VIEW_RENDER,
+            name=request.resolver_match.view_name,
+            origin=DjangoIntegration.origin,
+        ):
+            return callback(request, *args, **kwargs)
+
+    return sentry_wrapped_callback
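The effect of the patch above is that every view executed through BaseHandler runs inside an OP.VIEW_RENDER span named after resolver_match.view_name (when middleware_spans is enabled). A sketch of a view and the span it would produce (names are illustrative):

    # urls.py / views.py sketch -- names are illustrative, not part of this diff.
    from django.http import HttpResponse
    from django.urls import path

    def profile(request, user_id):
        return HttpResponse(f"user {user_id}")

    urlpatterns = [path("users/<int:user_id>/", profile, name="profile")]

    # A request to /users/42/ then runs inside a span with op=OP.VIEW_RENDER whose
    # name is request.resolver_match.view_name, i.e. "profile" here.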
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/dramatiq.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/dramatiq.py
new file mode 100644
index 00000000..f9ef13e2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/dramatiq.py
@@ -0,0 +1,168 @@
+import json
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import request_body_within_bounds
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    event_from_exception,
+)
+
+from dramatiq.broker import Broker  # type: ignore
+from dramatiq.message import Message  # type: ignore
+from dramatiq.middleware import Middleware, default_middleware  # type: ignore
+from dramatiq.errors import Retry  # type: ignore
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Dict, Optional, Union
+    from sentry_sdk._types import Event, Hint
+
+
+class DramatiqIntegration(Integration):
+    """
+    Dramatiq integration for Sentry
+
+    Please make sure that you call `sentry_sdk.init` *before* initializing
+    your broker, as it monkey patches `Broker.__init__`.
+
+    This integration was originally developed and maintained
+    by https://github.com/jacobsvante and later donated to the Sentry
+    project.
+    """
+
+    identifier = "dramatiq"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _patch_dramatiq_broker()
+
+
+def _patch_dramatiq_broker():
+    # type: () -> None
+    original_broker__init__ = Broker.__init__
+
+    def sentry_patched_broker__init__(self, *args, **kw):
+        # type: (Broker, *Any, **Any) -> None
+        integration = sentry_sdk.get_client().get_integration(DramatiqIntegration)
+
+        try:
+            middleware = kw.pop("middleware")
+        except KeyError:
+            # Unfortunately Broker and StubBroker allow middleware to be
+            # passed in as positional arguments, whilst RabbitmqBroker and
+            # RedisBroker do not.
+            if len(args) == 1:
+                middleware = args[0]
+                args = []  # type: ignore
+            else:
+                middleware = None
+
+        if middleware is None:
+            middleware = list(m() for m in default_middleware)
+        else:
+            middleware = list(middleware)
+
+        if integration is not None:
+            middleware = [m for m in middleware if not isinstance(m, SentryMiddleware)]
+            middleware.insert(0, SentryMiddleware())
+
+        kw["middleware"] = middleware
+        original_broker__init__(self, *args, **kw)
+
+    Broker.__init__ = sentry_patched_broker__init__
+
+
+class SentryMiddleware(Middleware):  # type: ignore[misc]
+    """
+    A Dramatiq middleware that automatically captures and sends
+    exceptions to Sentry.
+
+    This is automatically added to every instantiated broker via the
+    DramatiqIntegration.
+    """
+
+    def before_process_message(self, broker, message):
+        # type: (Broker, Message) -> None
+        integration = sentry_sdk.get_client().get_integration(DramatiqIntegration)
+        if integration is None:
+            return
+
+        message._scope_manager = sentry_sdk.new_scope()
+        message._scope_manager.__enter__()
+
+        scope = sentry_sdk.get_current_scope()
+        scope.transaction = message.actor_name
+        scope.set_extra("dramatiq_message_id", message.message_id)
+        scope.add_event_processor(_make_message_event_processor(message, integration))
+
+    def after_process_message(self, broker, message, *, result=None, exception=None):
+        # type: (Broker, Message, Any, Optional[Any], Optional[Exception]) -> None
+        integration = sentry_sdk.get_client().get_integration(DramatiqIntegration)
+        if integration is None:
+            return
+
+        actor = broker.get_actor(message.actor_name)
+        throws = message.options.get("throws") or actor.options.get("throws")
+
+        try:
+            if (
+                exception is not None
+                and not (throws and isinstance(exception, throws))
+                and not isinstance(exception, Retry)
+            ):
+                event, hint = event_from_exception(
+                    exception,
+                    client_options=sentry_sdk.get_client().options,
+                    mechanism={
+                        "type": DramatiqIntegration.identifier,
+                        "handled": False,
+                    },
+                )
+                sentry_sdk.capture_event(event, hint=hint)
+        finally:
+            message._scope_manager.__exit__(None, None, None)
+
+
+def _make_message_event_processor(message, integration):
+    # type: (Message, DramatiqIntegration) -> Callable[[Event, Hint], Optional[Event]]
+
+    def inner(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+        with capture_internal_exceptions():
+            DramatiqMessageExtractor(message).extract_into_event(event)
+
+        return event
+
+    return inner
+
+
+class DramatiqMessageExtractor:
+    def __init__(self, message):
+        # type: (Message) -> None
+        self.message_data = dict(message.asdict())
+
+    def content_length(self):
+        # type: () -> int
+        return len(json.dumps(self.message_data))
+
+    def extract_into_event(self, event):
+        # type: (Event) -> None
+        client = sentry_sdk.get_client()
+        if not client.is_active():
+            return
+
+        contexts = event.setdefault("contexts", {})
+        request_info = contexts.setdefault("dramatiq", {})
+        request_info["type"] = "dramatiq"
+
+        data = None  # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]
+        if not request_body_within_bounds(client, self.content_length()):
+            data = AnnotatedValue.removed_because_over_size_limit()
+        else:
+            data = self.message_data
+
+        request_info["data"] = data
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/excepthook.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/excepthook.py
new file mode 100644
index 00000000..61c7e460
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/excepthook.py
@@ -0,0 +1,83 @@
+import sys
+
+import sentry_sdk
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
+from sentry_sdk.integrations import Integration
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Callable
+    from typing import Any
+    from typing import Type
+    from typing import Optional
+
+    from types import TracebackType
+
+    Excepthook = Callable[
+        [Type[BaseException], BaseException, Optional[TracebackType]],
+        Any,
+    ]
+
+
+class ExcepthookIntegration(Integration):
+    identifier = "excepthook"
+
+    always_run = False
+
+    def __init__(self, always_run=False):
+        # type: (bool) -> None
+
+        if not isinstance(always_run, bool):
+            raise ValueError(
+                "Invalid value for always_run: %s (must be type boolean)"
+                % (always_run,)
+            )
+        self.always_run = always_run
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        sys.excepthook = _make_excepthook(sys.excepthook)
+
+
+def _make_excepthook(old_excepthook):
+    # type: (Excepthook) -> Excepthook
+    def sentry_sdk_excepthook(type_, value, traceback):
+        # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None
+        integration = sentry_sdk.get_client().get_integration(ExcepthookIntegration)
+
+        # Note: If we replace this with ensure_integration_enabled then
+        # we break the exceptiongroup backport;
+        # See: https://github.com/getsentry/sentry-python/issues/3097
+        if integration is None:
+            return old_excepthook(type_, value, traceback)
+
+        if _should_send(integration.always_run):
+            with capture_internal_exceptions():
+                event, hint = event_from_exception(
+                    (type_, value, traceback),
+                    client_options=sentry_sdk.get_client().options,
+                    mechanism={"type": "excepthook", "handled": False},
+                )
+                sentry_sdk.capture_event(event, hint=hint)
+
+        return old_excepthook(type_, value, traceback)
+
+    return sentry_sdk_excepthook
+
+
+def _should_send(always_run=False):
+    # type: (bool) -> bool
+    if always_run:
+        return True
+
+    if hasattr(sys, "ps1"):
+        # Disable the excepthook for interactive Python shells, otherwise
+        # every typo gets sent to Sentry.
+        return False
+
+    return True
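A sketch of opting in to reporting from interactive shells, using the always_run switch validated in ExcepthookIntegration.__init__ above (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.excepthook import ExcepthookIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[ExcepthookIntegration(always_run=True)],
    )

    raise RuntimeError("unhandled")  # flows through sentry_sdk_excepthook before the process exits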
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/executing.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/executing.py
new file mode 100644
index 00000000..6e68b8c0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/executing.py
@@ -0,0 +1,67 @@
+import sentry_sdk
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import walk_exception_chain, iter_stacks
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+
+    from sentry_sdk._types import Event, Hint
+
+try:
+    import executing
+except ImportError:
+    raise DidNotEnable("executing is not installed")
+
+
+class ExecutingIntegration(Integration):
+    identifier = "executing"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        @add_global_event_processor
+        def add_executing_info(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if sentry_sdk.get_client().get_integration(ExecutingIntegration) is None:
+                return event
+
+            if hint is None:
+                return event
+
+            exc_info = hint.get("exc_info", None)
+
+            if exc_info is None:
+                return event
+
+            exception = event.get("exception", None)
+
+            if exception is None:
+                return event
+
+            values = exception.get("values", None)
+
+            if values is None:
+                return event
+
+            for exception, (_exc_type, _exc_value, exc_tb) in zip(
+                reversed(values), walk_exception_chain(exc_info)
+            ):
+                sentry_frames = [
+                    frame
+                    for frame in exception.get("stacktrace", {}).get("frames", [])
+                    if frame.get("function")
+                ]
+                tbs = list(iter_stacks(exc_tb))
+                if len(sentry_frames) != len(tbs):
+                    continue
+
+                for sentry_frame, tb in zip(sentry_frames, tbs):
+                    frame = tb.tb_frame
+                    source = executing.Source.for_frame(frame)
+                    sentry_frame["function"] = source.code_qualname(frame.f_code)
+
+            return event
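A sketch of enabling this integration explicitly so stack-frame function names are upgraded to qualified names via the executing package (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.executing import ExecutingIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[ExecutingIntegration()],
    )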
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/falcon.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/falcon.py
new file mode 100644
index 00000000..ddedcb10
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/falcon.py
@@ -0,0 +1,272 @@
+import sentry_sdk
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    parse_version,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+
+    from sentry_sdk._types import Event, EventProcessor
+
+# In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers`
+# and `falcon.API` to `falcon.App`
+
+try:
+    import falcon  # type: ignore
+
+    from falcon import __version__ as FALCON_VERSION
+except ImportError:
+    raise DidNotEnable("Falcon not installed")
+
+try:
+    import falcon.app_helpers  # type: ignore
+
+    falcon_helpers = falcon.app_helpers
+    falcon_app_class = falcon.App
+    FALCON3 = True
+except ImportError:
+    import falcon.api_helpers  # type: ignore
+
+    falcon_helpers = falcon.api_helpers
+    falcon_app_class = falcon.API
+    FALCON3 = False
+
+
+_FALCON_UNSET = None  # type: Optional[object]
+if FALCON3:  # falcon.request._UNSET is only available in Falcon 3.0+
+    with capture_internal_exceptions():
+        from falcon.request import _UNSET as _FALCON_UNSET  # type: ignore[import-not-found, no-redef]
+
+
+class FalconRequestExtractor(RequestExtractor):
+    def env(self):
+        # type: () -> Dict[str, Any]
+        return self.request.env
+
+    def cookies(self):
+        # type: () -> Dict[str, Any]
+        return self.request.cookies
+
+    def form(self):
+        # type: () -> None
+        return None  # No such concept in Falcon
+
+    def files(self):
+        # type: () -> None
+        return None  # No such concept in Falcon
+
+    def raw_data(self):
+        # type: () -> Optional[str]
+
+        # As request data can only be read once, we won't make this available
+        # to Sentry. Just send back a dummy string in case there was a
+        # content length.
+        # TODO(jmagnusson): Figure out if there's a way to support this
+        content_length = self.content_length()
+        if content_length > 0:
+            return "[REQUEST_CONTAINING_RAW_DATA]"
+        else:
+            return None
+
+    def json(self):
+        # type: () -> Optional[Dict[str, Any]]
+        # fallback to cached_media = None if self.request._media is not available
+        cached_media = None
+        with capture_internal_exceptions():
+            # self.request._media is the cached self.request.media
+            # value. It is only available if self.request.media
+            # has already been accessed. Therefore, reading
+            # self.request._media will not exhaust the raw request
+            # stream (self.request.bounded_stream) because it has
+            # already been read if self.request._media is set.
+            cached_media = self.request._media
+
+        if cached_media is not _FALCON_UNSET:
+            return cached_media
+
+        return None
+
+
+class SentryFalconMiddleware:
+    """Captures exceptions in Falcon requests and send to Sentry"""
+
+    def process_request(self, req, resp, *args, **kwargs):
+        # type: (Any, Any, *Any, **Any) -> None
+        integration = sentry_sdk.get_client().get_integration(FalconIntegration)
+        if integration is None:
+            return
+
+        scope = sentry_sdk.get_isolation_scope()
+        scope._name = "falcon"
+        scope.add_event_processor(_make_request_event_processor(req, integration))
+
+
+TRANSACTION_STYLE_VALUES = ("uri_template", "path")
+
+
+class FalconIntegration(Integration):
+    identifier = "falcon"
+    origin = f"auto.http.{identifier}"
+
+    transaction_style = ""
+
+    def __init__(self, transaction_style="uri_template"):
+        # type: (str) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        version = parse_version(FALCON_VERSION)
+        _check_minimum_version(FalconIntegration, version)
+
+        _patch_wsgi_app()
+        _patch_handle_exception()
+        _patch_prepare_middleware()
+
+
+def _patch_wsgi_app():
+    # type: () -> None
+    original_wsgi_app = falcon_app_class.__call__
+
+    def sentry_patched_wsgi_app(self, env, start_response):
+        # type: (falcon.API, Any, Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(FalconIntegration)
+        if integration is None:
+            return original_wsgi_app(self, env, start_response)
+
+        sentry_wrapped = SentryWsgiMiddleware(
+            lambda envi, start_resp: original_wsgi_app(self, envi, start_resp),
+            span_origin=FalconIntegration.origin,
+        )
+
+        return sentry_wrapped(env, start_response)
+
+    falcon_app_class.__call__ = sentry_patched_wsgi_app
+
+
+def _patch_handle_exception():
+    # type: () -> None
+    original_handle_exception = falcon_app_class._handle_exception
+
+    @ensure_integration_enabled(FalconIntegration, original_handle_exception)
+    def sentry_patched_handle_exception(self, *args):
+        # type: (falcon.API, *Any) -> Any
+        # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
+        # method signature from `(ex, req, resp, params)` to
+        # `(req, resp, ex, params)`
+        ex = response = None
+        with capture_internal_exceptions():
+            ex = next(argument for argument in args if isinstance(argument, Exception))
+            response = next(
+                argument for argument in args if isinstance(argument, falcon.Response)
+            )
+
+        was_handled = original_handle_exception(self, *args)
+
+        if ex is None or response is None:
+            # Both ex and response should have a non-None value at this point; otherwise,
+            # there is an error with the SDK that will have been captured in the
+            # capture_internal_exceptions block above.
+            return was_handled
+
+        if _exception_leads_to_http_5xx(ex, response):
+            event, hint = event_from_exception(
+                ex,
+                client_options=sentry_sdk.get_client().options,
+                mechanism={"type": "falcon", "handled": False},
+            )
+            sentry_sdk.capture_event(event, hint=hint)
+
+        return was_handled
+
+    falcon_app_class._handle_exception = sentry_patched_handle_exception
+
+
+def _patch_prepare_middleware():
+    # type: () -> None
+    original_prepare_middleware = falcon_helpers.prepare_middleware
+
+    def sentry_patched_prepare_middleware(
+        middleware=None, independent_middleware=False, asgi=False
+    ):
+        # type: (Any, Any, bool) -> Any
+        if asgi:
+            # We don't support ASGI Falcon apps, so we don't patch anything here
+            return original_prepare_middleware(middleware, independent_middleware, asgi)
+
+        integration = sentry_sdk.get_client().get_integration(FalconIntegration)
+        if integration is not None:
+            middleware = [SentryFalconMiddleware()] + (middleware or [])
+
+        # We intentionally omit the asgi argument here, since the default is False anyway,
+        # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions.
+        return original_prepare_middleware(middleware, independent_middleware)
+
+    falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware
+
+
+def _exception_leads_to_http_5xx(ex, response):
+    # type: (Exception, falcon.Response) -> bool
+    is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith(
+        "5"
+    )
+    is_unhandled_error = not isinstance(
+        ex, (falcon.HTTPError, falcon.http_status.HTTPStatus)
+    )
+
+    # We only check the HTTP status on Falcon 3 because in Falcon 2, the status on the response
+    # at the stage where we capture it is listed as 200, even though we would expect to see a 500
+    # status. Since at the time of this change, Falcon 2 is ca. 4 years old, we have decided to
+    # only perform this check on Falcon 3+, despite the risk that some handled errors might be
+    # reported to Sentry as unhandled on Falcon 2.
+    return (is_server_error or is_unhandled_error) and (
+        not FALCON3 or _has_http_5xx_status(response)
+    )
+
+
+def _has_http_5xx_status(response):
+    # type: (falcon.Response) -> bool
+    return response.status.startswith("5")
+
+
+def _set_transaction_name_and_source(event, transaction_style, request):
+    # type: (Event, str, falcon.Request) -> None
+    name_for_style = {
+        "uri_template": request.uri_template,
+        "path": request.path,
+    }
+    event["transaction"] = name_for_style[transaction_style]
+    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+
+
+def _make_request_event_processor(req, integration):
+    # type: (falcon.Request, FalconIntegration) -> EventProcessor
+
+    def event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        _set_transaction_name_and_source(event, integration.transaction_style, req)
+
+        with capture_internal_exceptions():
+            FalconRequestExtractor(req).extract_into_event(event)
+
+        return event
+
+    return event_processor
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/fastapi.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/fastapi.py
new file mode 100644
index 00000000..76c6adee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/fastapi.py
@@ -0,0 +1,147 @@
+import asyncio
+from copy import deepcopy
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource
+from sentry_sdk.utils import (
+    transaction_from_function,
+    logger,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Dict
+    from sentry_sdk._types import Event
+
+try:
+    from sentry_sdk.integrations.starlette import (
+        StarletteIntegration,
+        StarletteRequestExtractor,
+    )
+except DidNotEnable:
+    raise DidNotEnable("Starlette is not installed")
+
+try:
+    import fastapi  # type: ignore
+except ImportError:
+    raise DidNotEnable("FastAPI is not installed")
+
+
+_DEFAULT_TRANSACTION_NAME = "generic FastAPI request"
+
+
+class FastApiIntegration(StarletteIntegration):
+    identifier = "fastapi"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_get_request_handler()
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (sentry_sdk.Scope, str, Any) -> None
+    name = ""
+
+    if transaction_style == "endpoint":
+        endpoint = request.scope.get("endpoint")
+        if endpoint:
+            name = transaction_from_function(endpoint) or ""
+
+    elif transaction_style == "url":
+        route = request.scope.get("route")
+        if route:
+            path = getattr(route, "path", None)
+            if path is not None:
+                name = path
+
+    if not name:
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TransactionSource.ROUTE
+    else:
+        source = SOURCE_FOR_STYLE[transaction_style]
+
+    scope.set_transaction_name(name, source=source)
+    logger.debug(
+        "[FastAPI] Set transaction name and source on scope: %s / %s", name, source
+    )
+
+
+def patch_get_request_handler():
+    # type: () -> None
+    old_get_request_handler = fastapi.routing.get_request_handler
+
+    def _sentry_get_request_handler(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        dependant = kwargs.get("dependant")
+        if (
+            dependant
+            and dependant.call is not None
+            and not asyncio.iscoroutinefunction(dependant.call)
+        ):
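+            # Sync (non-async) endpoints run in Starlette's worker thread pool, so
+            # record the thread that actually executes the handler on the current
+            # transaction and profile.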
+            old_call = dependant.call
+
+            @wraps(old_call)
+            def _sentry_call(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                current_scope = sentry_sdk.get_current_scope()
+                if current_scope.transaction is not None:
+                    current_scope.transaction.update_active_thread()
+
+                sentry_scope = sentry_sdk.get_isolation_scope()
+                if sentry_scope.profile is not None:
+                    sentry_scope.profile.update_active_thread_id()
+
+                return old_call(*args, **kwargs)
+
+            dependant.call = _sentry_call
+
+        old_app = old_get_request_handler(*args, **kwargs)
+
+        async def _sentry_app(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            integration = sentry_sdk.get_client().get_integration(FastApiIntegration)
+            if integration is None:
+                return await old_app(*args, **kwargs)
+
+            request = args[0]
+
+            _set_transaction_name_and_source(
+                sentry_sdk.get_current_scope(), integration.transaction_style, request
+            )
+            sentry_scope = sentry_sdk.get_isolation_scope()
+            extractor = StarletteRequestExtractor(request)
+            info = await extractor.extract_request_info()
+
+            def _make_request_event_processor(req, integration):
+                # type: (Any, Any) -> Callable[[Event, Dict[str, Any]], Event]
+                def event_processor(event, hint):
+                    # type: (Event, Dict[str, Any]) -> Event
+
+                    # Extract information from request
+                    request_info = event.get("request", {})
+                    if info:
+                        if "cookies" in info and should_send_default_pii():
+                            request_info["cookies"] = info["cookies"]
+                        if "data" in info:
+                            request_info["data"] = info["data"]
+                    event["request"] = deepcopy(request_info)
+
+                    return event
+
+                return event_processor
+
+            sentry_scope._name = FastApiIntegration.identifier
+            sentry_scope.add_event_processor(
+                _make_request_event_processor(request, integration)
+            )
+
+            return await old_app(*args, **kwargs)
+
+        return _sentry_app
+
+    fastapi.routing.get_request_handler = _sentry_get_request_handler
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/flask.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/flask.py
new file mode 100644
index 00000000..f45ec6db
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/flask.py
@@ -0,0 +1,275 @@
+import sentry_sdk
+from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk.integrations._wsgi_common import (
+    DEFAULT_HTTP_METHODS_TO_CAPTURE,
+    RequestExtractor,
+)
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    package_version,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Dict, Union
+
+    from sentry_sdk._types import Event, EventProcessor
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from werkzeug.datastructures import FileStorage, ImmutableMultiDict
+
+
+try:
+    import flask_login  # type: ignore
+except ImportError:
+    flask_login = None
+
+try:
+    from flask import Flask, Request  # type: ignore
+    from flask import request as flask_request
+    from flask.signals import (
+        before_render_template,
+        got_request_exception,
+        request_started,
+    )
+    from markupsafe import Markup
+except ImportError:
+    raise DidNotEnable("Flask is not installed")
+
+try:
+    import blinker  # noqa
+except ImportError:
+    raise DidNotEnable("blinker is not installed")
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class FlaskIntegration(Integration):
+    identifier = "flask"
+    origin = f"auto.http.{identifier}"
+
+    transaction_style = ""
+
+    def __init__(
+        self,
+        transaction_style="endpoint",  # type: str
+        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: tuple[str, ...]
+    ):
+        # type: (...) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+        self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        try:
+            from quart import Quart  # type: ignore
+
+            if Flask == Quart:
+                # This is Quart masquerading as Flask, don't enable the Flask
+                # integration. See https://github.com/getsentry/sentry-python/issues/2709
+                raise DidNotEnable(
+                    "This is not a Flask app but rather Quart pretending to be Flask"
+                )
+        except ImportError:
+            pass
+
+        version = package_version("flask")
+        _check_minimum_version(FlaskIntegration, version)
+
+        before_render_template.connect(_add_sentry_trace)
+        request_started.connect(_request_started)
+        got_request_exception.connect(_capture_exception)
+
+        old_app = Flask.__call__
+
+        def sentry_patched_wsgi_app(self, environ, start_response):
+            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+            integration = sentry_sdk.get_client().get_integration(FlaskIntegration)
+            if integration is None:
+                return old_app(self, environ, start_response)
+
+            middleware = SentryWsgiMiddleware(
+                lambda *a, **kw: old_app(self, *a, **kw),
+                span_origin=FlaskIntegration.origin,
+                http_methods_to_capture=(
+                    integration.http_methods_to_capture
+                    if integration
+                    else DEFAULT_HTTP_METHODS_TO_CAPTURE
+                ),
+            )
+            return middleware(environ, start_response)
+
+        Flask.__call__ = sentry_patched_wsgi_app
+
+
+def _add_sentry_trace(sender, template, context, **extra):
+    # type: (Flask, Any, Dict[str, Any], **Any) -> None
+    if "sentry_trace" in context:
+        return
+
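+    # trace_propagation_meta() renders the <meta> tags (sentry-trace, baggage)
+    # that allow pages rendered from this template to continue the current trace.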
+    scope = sentry_sdk.get_current_scope()
+    trace_meta = Markup(scope.trace_propagation_meta())
+    context["sentry_trace"] = trace_meta  # for backwards compatibility
+    context["sentry_trace_meta"] = trace_meta
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (sentry_sdk.Scope, str, Request) -> None
+    try:
+        name_for_style = {
+            "url": request.url_rule.rule,
+            "endpoint": request.url_rule.endpoint,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
+        )
+    except Exception:
+        pass
+
+
+def _request_started(app, **kwargs):
+    # type: (Flask, **Any) -> None
+    integration = sentry_sdk.get_client().get_integration(FlaskIntegration)
+    if integration is None:
+        return
+
+    request = flask_request._get_current_object()
+
+    # Set the transaction name and source here,
+    # but rely on WSGI middleware to actually start the transaction
+    _set_transaction_name_and_source(
+        sentry_sdk.get_current_scope(), integration.transaction_style, request
+    )
+
+    scope = sentry_sdk.get_isolation_scope()
+    evt_processor = _make_request_event_processor(app, request, integration)
+    scope.add_event_processor(evt_processor)
+
+
+class FlaskRequestExtractor(RequestExtractor):
+    def env(self):
+        # type: () -> Dict[str, str]
+        return self.request.environ
+
+    def cookies(self):
+        # type: () -> Dict[Any, Any]
+        return {
+            k: v[0] if isinstance(v, list) and len(v) == 1 else v
+            for k, v in self.request.cookies.items()
+        }
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.get_data()
+
+    def form(self):
+        # type: () -> ImmutableMultiDict[str, Any]
+        return self.request.form
+
+    def files(self):
+        # type: () -> ImmutableMultiDict[str, Any]
+        return self.request.files
+
+    def is_json(self):
+        # type: () -> bool
+        return self.request.is_json
+
+    def json(self):
+        # type: () -> Any
+        return self.request.get_json(silent=True)
+
+    def size_of_file(self, file):
+        # type: (FileStorage) -> int
+        return file.content_length
+
+
+def _make_request_event_processor(app, request, integration):
+    # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+
+        # If the request is gone, we are fine not logging its data. This
+        # might happen if the processor has been moved to another thread.
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            FlaskRequestExtractor(request).extract_into_event(event)
+
+        if should_send_default_pii():
+            with capture_internal_exceptions():
+                _add_user_to_event(event)
+
+        return event
+
+    return inner
+
+
+@ensure_integration_enabled(FlaskIntegration)
+def _capture_exception(sender, exception, **kwargs):
+    # type: (Flask, Union[ValueError, BaseException], **Any) -> None
+    event, hint = event_from_exception(
+        exception,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "flask", "handled": False},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _add_user_to_event(event):
+    # type: (Event) -> None
+    if flask_login is None:
+        return
+
+    user = flask_login.current_user
+    if user is None:
+        return
+
+    with capture_internal_exceptions():
+        # Access this object as late as possible as accessing the user
+        # is relatively costly
+
+        user_info = event.setdefault("user", {})
+
+        try:
+            user_info.setdefault("id", user.get_id())
+            # TODO: more configurable user attrs here
+        except AttributeError:
+            # might happen if:
+            # - flask_login could not be imported
+            # - flask_login is not configured
+            # - no user is logged in
+            pass
+
+        # The following attribute accesses are ineffective for the general
+        # Flask-Login case, because the User interface of Flask-Login does not
+        # care about anything but the ID. However, Flask-User (based on
+        # Flask-Login) documents a few optional extra attributes.
+        #
+        # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names
+
+        try:
+            user_info.setdefault("email", user.email)
+        except Exception:
+            pass
+
+        try:
+            user_info.setdefault("username", user.username)
+        except Exception:
+            pass
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gcp.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gcp.py
new file mode 100644
index 00000000..c637b741
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gcp.py
@@ -0,0 +1,234 @@
+import functools
+import sys
+from copy import deepcopy
+from datetime import datetime, timedelta, timezone
+from os import environ
+
+import sentry_sdk
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import TransactionSource
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    event_from_exception,
+    logger,
+    TimeoutThread,
+    reraise,
+)
+
+from typing import TYPE_CHECKING
+
+# Constants
+TIMEOUT_WARNING_BUFFER = 1.5  # Buffer time required to send timeout warning to Sentry
+MILLIS_TO_SECONDS = 1000.0
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import TypeVar
+    from typing import Callable
+    from typing import Optional
+
+    from sentry_sdk._types import EventProcessor, Event, Hint
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+
+def _wrap_func(func):
+    # type: (F) -> F
+    @functools.wraps(func)
+    def sentry_func(functionhandler, gcp_event, *args, **kwargs):
+        # type: (Any, Any, *Any, **Any) -> Any
+        client = sentry_sdk.get_client()
+
+        integration = client.get_integration(GcpIntegration)
+        if integration is None:
+            return func(functionhandler, gcp_event, *args, **kwargs)
+
+        configured_time = environ.get("FUNCTION_TIMEOUT_SEC")
+        if not configured_time:
+            logger.debug(
+                "The configured timeout could not be fetched from Cloud Functions configuration."
+            )
+            return func(functionhandler, gcp_event, *args, **kwargs)
+
+        configured_time = int(configured_time)
+
+        initial_time = datetime.now(timezone.utc)
+
+        with sentry_sdk.isolation_scope() as scope:
+            with capture_internal_exceptions():
+                scope.clear_breadcrumbs()
+                scope.add_event_processor(
+                    _make_request_event_processor(
+                        gcp_event, configured_time, initial_time
+                    )
+                )
+                scope.set_tag("gcp_region", environ.get("FUNCTION_REGION"))
+                timeout_thread = None
+                if (
+                    integration.timeout_warning
+                    and configured_time > TIMEOUT_WARNING_BUFFER
+                ):
+                    waiting_time = configured_time - TIMEOUT_WARNING_BUFFER
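+                    # The timeout warning is raised TIMEOUT_WARNING_BUFFER seconds
+                    # before the configured function timeout elapses.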
+
+                    timeout_thread = TimeoutThread(waiting_time, configured_time)
+
+                    # Starting the thread to raise timeout warning exception
+                    timeout_thread.start()
+
+            headers = {}
+            if hasattr(gcp_event, "headers"):
+                headers = gcp_event.headers
+
+            transaction = continue_trace(
+                headers,
+                op=OP.FUNCTION_GCP,
+                name=environ.get("FUNCTION_NAME", ""),
+                source=TransactionSource.COMPONENT,
+                origin=GcpIntegration.origin,
+            )
+            sampling_context = {
+                "gcp_env": {
+                    "function_name": environ.get("FUNCTION_NAME"),
+                    "function_entry_point": environ.get("ENTRY_POINT"),
+                    "function_identity": environ.get("FUNCTION_IDENTITY"),
+                    "function_region": environ.get("FUNCTION_REGION"),
+                    "function_project": environ.get("GCP_PROJECT"),
+                },
+                "gcp_event": gcp_event,
+            }
+            with sentry_sdk.start_transaction(
+                transaction, custom_sampling_context=sampling_context
+            ):
+                try:
+                    return func(functionhandler, gcp_event, *args, **kwargs)
+                except Exception:
+                    exc_info = sys.exc_info()
+                    sentry_event, hint = event_from_exception(
+                        exc_info,
+                        client_options=client.options,
+                        mechanism={"type": "gcp", "handled": False},
+                    )
+                    sentry_sdk.capture_event(sentry_event, hint=hint)
+                    reraise(*exc_info)
+                finally:
+                    if timeout_thread:
+                        timeout_thread.stop()
+                    # Flush out the event queue
+                    client.flush()
+
+    return sentry_func  # type: ignore
+
+
+class GcpIntegration(Integration):
+    identifier = "gcp"
+    origin = f"auto.function.{identifier}"
+
+    def __init__(self, timeout_warning=False):
+        # type: (bool) -> None
+        self.timeout_warning = timeout_warning
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        import __main__ as gcp_functions
+
+        if not hasattr(gcp_functions, "worker_v1"):
+            logger.warning(
+                "GcpIntegration currently supports only Python 3.7 runtime environment."
+            )
+            return
+
+        worker1 = gcp_functions.worker_v1
+
+        worker1.FunctionHandler.invoke_user_function = _wrap_func(
+            worker1.FunctionHandler.invoke_user_function
+        )
+
+
+def _make_request_event_processor(gcp_event, configured_timeout, initial_time):
+    # type: (Any, Any, Any) -> EventProcessor
+
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        final_time = datetime.now(timezone.utc)
+        time_diff = final_time - initial_time
+
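+        # Dividing a timedelta by a one-millisecond timedelta yields the
+        # execution duration in milliseconds as a float.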
+        execution_duration_in_millis = time_diff / timedelta(milliseconds=1)
+
+        extra = event.setdefault("extra", {})
+        extra["google cloud functions"] = {
+            "function_name": environ.get("FUNCTION_NAME"),
+            "function_entry_point": environ.get("ENTRY_POINT"),
+            "function_identity": environ.get("FUNCTION_IDENTITY"),
+            "function_region": environ.get("FUNCTION_REGION"),
+            "function_project": environ.get("GCP_PROJECT"),
+            "execution_duration_in_millis": execution_duration_in_millis,
+            "configured_timeout_in_seconds": configured_timeout,
+        }
+
+        extra["google cloud logs"] = {
+            "url": _get_google_cloud_logs_url(final_time),
+        }
+
+        request = event.get("request", {})
+
+        request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME"))
+
+        if hasattr(gcp_event, "method"):
+            request["method"] = gcp_event.method
+
+        if hasattr(gcp_event, "query_string"):
+            request["query_string"] = gcp_event.query_string.decode("utf-8")
+
+        if hasattr(gcp_event, "headers"):
+            request["headers"] = _filter_headers(gcp_event.headers)
+
+        if should_send_default_pii():
+            if hasattr(gcp_event, "data"):
+                request["data"] = gcp_event.data
+        else:
+            if hasattr(gcp_event, "data"):
+                # Unfortunately there is no way to get a structured body from the
+                # GCP event, so every body is treated as unstructured.
+                request["data"] = AnnotatedValue.removed_because_raw_data()
+
+        event["request"] = deepcopy(request)
+
+        return event
+
+    return event_processor
+
+
+def _get_google_cloud_logs_url(final_time):
+    # type: (datetime) -> str
+    """
+    Generate a Google Cloud Logs console URL based on the environment variables.
+    Arguments:
+        final_time {datetime} -- Final time
+    Returns:
+        str -- Google Cloud Logs console URL for the function's logs.
+    """
+    hour_ago = final_time - timedelta(hours=1)
+    formatstring = "%Y-%m-%dT%H:%M:%SZ"
+
+    url = (
+        "https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function"
+        "%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false"
+        "&timestamp={timestamp_end}&customFacets=&limitCustomFacetWidth=true"
+        "&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}"
+        "&interval=PT1H&scrollTimestamp={timestamp_end}"
+    ).format(
+        project=environ.get("GCP_PROJECT"),
+        function_name=environ.get("FUNCTION_NAME"),
+        region=environ.get("FUNCTION_REGION"),
+        timestamp_end=final_time.strftime(formatstring),
+        timestamp_start=hour_ago.strftime(formatstring),
+    )
+
+    return url
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gnu_backtrace.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gnu_backtrace.py
new file mode 100644
index 00000000..dc3dc80f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gnu_backtrace.py
@@ -0,0 +1,107 @@
+import re
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import capture_internal_exceptions
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from sentry_sdk._types import Event
+
+
+MODULE_RE = r"[a-zA-Z0-9/._:\\-]+"
+TYPE_RE = r"[a-zA-Z0-9._:<>,-]+"
+HEXVAL_RE = r"[A-Fa-f0-9]+"
+
+
+FRAME_RE = r"""
+^(?P<index>\d+)\.\s
+(?P<package>{MODULE_RE})\(
+  (?P<retval>{TYPE_RE}\ )?
+  ((?P<function>{TYPE_RE})
+    (?P<args>\(.*\))?
+  )?
+  ((?P<constoffset>\ const)?\+0x(?P<offset>{HEXVAL_RE}))?
+\)\s
+\[0x(?P<retaddr>{HEXVAL_RE})\]$
+""".format(
+    MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE
+)
+
+FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE)
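+# Illustrative (hypothetical) example of a backtrace line FRAME_RE is meant to match:
+#   3. /usr/bin/server(foo::bar(int)+0x1f) [0x7f2b4c3d9e10]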
+
+
+class GnuBacktraceIntegration(Integration):
+    identifier = "gnu_backtrace"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @add_global_event_processor
+        def process_gnu_backtrace(event, hint):
+            # type: (Event, dict[str, Any]) -> Event
+            with capture_internal_exceptions():
+                return _process_gnu_backtrace(event, hint)
+
+
+def _process_gnu_backtrace(event, hint):
+    # type: (Event, dict[str, Any]) -> Event
+    if sentry_sdk.get_client().get_integration(GnuBacktraceIntegration) is None:
+        return event
+
+    exc_info = hint.get("exc_info", None)
+
+    if exc_info is None:
+        return event
+
+    exception = event.get("exception", None)
+
+    if exception is None:
+        return event
+
+    values = exception.get("values", None)
+
+    if values is None:
+        return event
+
+    for exception in values:
+        frames = exception.get("stacktrace", {}).get("frames", [])
+        if not frames:
+            continue
+
+        msg = exception.get("value", None)
+        if not msg:
+            continue
+
+        additional_frames = []
+        new_msg = []
+
+        for line in msg.splitlines():
+            match = FRAME_RE.match(line)
+            if match:
+                additional_frames.append(
+                    (
+                        int(match.group("index")),
+                        {
+                            "package": match.group("package") or None,
+                            "function": match.group("function") or None,
+                            "platform": "native",
+                        },
+                    )
+                )
+            else:
+                # Keep lines that do not parse as backtrace frames in the message unchanged.
+                new_msg.append(line)
+
+        if additional_frames:
+            additional_frames.sort(key=lambda x: -x[0])
+            for _, frame in additional_frames:
+                frames.append(frame)
+
+            new_msg.append("<stacktrace parsed and removed by GnuBacktraceIntegration>")
+            exception["value"] = "\n".join(new_msg)
+
+    return event
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gql.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gql.py
new file mode 100644
index 00000000..5f4436f5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/gql.py
@@ -0,0 +1,145 @@
+import sentry_sdk
+from sentry_sdk.utils import (
+    event_from_exception,
+    ensure_integration_enabled,
+    parse_version,
+)
+
+from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
+
+try:
+    import gql  # type: ignore[import-not-found]
+    from graphql import (
+        print_ast,
+        get_operation_ast,
+        DocumentNode,
+        VariableDefinitionNode,
+    )
+    from gql.transport import Transport, AsyncTransport  # type: ignore[import-not-found]
+    from gql.transport.exceptions import TransportQueryError  # type: ignore[import-not-found]
+except ImportError:
+    raise DidNotEnable("gql is not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Tuple, Union
+    from sentry_sdk._types import Event, EventProcessor
+
+    EventDataType = Dict[str, Union[str, Tuple[VariableDefinitionNode, ...]]]
+
+
+class GQLIntegration(Integration):
+    identifier = "gql"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        gql_version = parse_version(gql.__version__)
+        _check_minimum_version(GQLIntegration, gql_version)
+
+        _patch_execute()
+
+
+def _data_from_document(document):
+    # type: (DocumentNode) -> EventDataType
+    try:
+        operation_ast = get_operation_ast(document)
+        data = {"query": print_ast(document)}  # type: EventDataType
+
+        if operation_ast is not None:
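+            # Note: these are the variable *definitions* from the document AST,
+            # not the runtime variable values.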
+            data["variables"] = operation_ast.variable_definitions
+            if operation_ast.name is not None:
+                data["operationName"] = operation_ast.name.value
+
+        return data
+    except (AttributeError, TypeError):
+        return dict()
+
+
+def _transport_method(transport):
+    # type: (Union[Transport, AsyncTransport]) -> str
+    """
+    The RequestsHTTPTransport allows defining the HTTP method; all
+    other transports use POST.
+    """
+    try:
+        return transport.method
+    except AttributeError:
+        return "POST"
+
+
+def _request_info_from_transport(transport):
+    # type: (Union[Transport, AsyncTransport, None]) -> Dict[str, str]
+    if transport is None:
+        return {}
+
+    request_info = {
+        "method": _transport_method(transport),
+    }
+
+    try:
+        request_info["url"] = transport.url
+    except AttributeError:
+        pass
+
+    return request_info
+
+
+def _patch_execute():
+    # type: () -> None
+    real_execute = gql.Client.execute
+
+    @ensure_integration_enabled(GQLIntegration, real_execute)
+    def sentry_patched_execute(self, document, *args, **kwargs):
+        # type: (gql.Client, DocumentNode, Any, Any) -> Any
+        scope = sentry_sdk.get_isolation_scope()
+        scope.add_event_processor(_make_gql_event_processor(self, document))
+
+        try:
+            return real_execute(self, document, *args, **kwargs)
+        except TransportQueryError as e:
+            event, hint = event_from_exception(
+                e,
+                client_options=sentry_sdk.get_client().options,
+                mechanism={"type": "gql", "handled": False},
+            )
+
+            sentry_sdk.capture_event(event, hint)
+            raise e
+
+    gql.Client.execute = sentry_patched_execute
+
+
+def _make_gql_event_processor(client, document):
+    # type: (gql.Client, DocumentNode) -> EventProcessor
+    def processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        try:
+            errors = hint["exc_info"][1].errors
+        except (AttributeError, KeyError):
+            errors = None
+
+        request = event.setdefault("request", {})
+        request.update(
+            {
+                "api_target": "graphql",
+                **_request_info_from_transport(client.transport),
+            }
+        )
+
+        if should_send_default_pii():
+            request["data"] = _data_from_document(document)
+            contexts = event.setdefault("contexts", {})
+            response = contexts.setdefault("response", {})
+            response.update(
+                {
+                    "data": {"errors": errors},
+                    "type": response,
+                }
+            )
+
+        return event
+
+    return processor
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/graphene.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/graphene.py
new file mode 100644
index 00000000..00a8d155
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/graphene.py
@@ -0,0 +1,151 @@
+from contextlib import contextmanager
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    package_version,
+)
+
+try:
+    from graphene.types import schema as graphene_schema  # type: ignore
+except ImportError:
+    raise DidNotEnable("graphene is not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Generator
+    from typing import Any, Dict, Union
+    from graphene.language.source import Source  # type: ignore
+    from graphql.execution import ExecutionResult
+    from graphql.type import GraphQLSchema
+    from sentry_sdk._types import Event
+
+
+class GrapheneIntegration(Integration):
+    identifier = "graphene"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = package_version("graphene")
+        _check_minimum_version(GrapheneIntegration, version)
+
+        _patch_graphql()
+
+
+def _patch_graphql():
+    # type: () -> None
+    old_graphql_sync = graphene_schema.graphql_sync
+    old_graphql_async = graphene_schema.graphql
+
+    @ensure_integration_enabled(GrapheneIntegration, old_graphql_sync)
+    def _sentry_patched_graphql_sync(schema, source, *args, **kwargs):
+        # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult
+        scope = sentry_sdk.get_isolation_scope()
+        scope.add_event_processor(_event_processor)
+
+        with graphql_span(schema, source, kwargs):
+            result = old_graphql_sync(schema, source, *args, **kwargs)
+
+        with capture_internal_exceptions():
+            client = sentry_sdk.get_client()
+            for error in result.errors or []:
+                event, hint = event_from_exception(
+                    error,
+                    client_options=client.options,
+                    mechanism={
+                        "type": GrapheneIntegration.identifier,
+                        "handled": False,
+                    },
+                )
+                sentry_sdk.capture_event(event, hint=hint)
+
+        return result
+
+    async def _sentry_patched_graphql_async(schema, source, *args, **kwargs):
+        # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult
+        integration = sentry_sdk.get_client().get_integration(GrapheneIntegration)
+        if integration is None:
+            return await old_graphql_async(schema, source, *args, **kwargs)
+
+        scope = sentry_sdk.get_isolation_scope()
+        scope.add_event_processor(_event_processor)
+
+        with graphql_span(schema, source, kwargs):
+            result = await old_graphql_async(schema, source, *args, **kwargs)
+
+        with capture_internal_exceptions():
+            client = sentry_sdk.get_client()
+            for error in result.errors or []:
+                event, hint = event_from_exception(
+                    error,
+                    client_options=client.options,
+                    mechanism={
+                        "type": GrapheneIntegration.identifier,
+                        "handled": False,
+                    },
+                )
+                sentry_sdk.capture_event(event, hint=hint)
+
+        return result
+
+    graphene_schema.graphql_sync = _sentry_patched_graphql_sync
+    graphene_schema.graphql = _sentry_patched_graphql_async
+
+
+def _event_processor(event, hint):
+    # type: (Event, Dict[str, Any]) -> Event
+    if should_send_default_pii():
+        request_info = event.setdefault("request", {})
+        request_info["api_target"] = "graphql"
+
+    elif event.get("request", {}).get("data"):
+        del event["request"]["data"]
+
+    return event
+
+
+@contextmanager
+def graphql_span(schema, source, kwargs):
+    # type: (GraphQLSchema, Union[str, Source], Dict[str, Any]) -> Generator[None, None, None]
+    operation_name = kwargs.get("operation_name")
+
+    operation_type = "query"
+    op = OP.GRAPHQL_QUERY
+    if source.strip().startswith("mutation"):
+        operation_type = "mutation"
+        op = OP.GRAPHQL_MUTATION
+    elif source.strip().startswith("subscription"):
+        operation_type = "subscription"
+        op = OP.GRAPHQL_SUBSCRIPTION
+
+    sentry_sdk.add_breadcrumb(
+        crumb={
+            "data": {
+                "operation_name": operation_name,
+                "operation_type": operation_type,
+            },
+            "category": "graphql.operation",
+        },
+    )
+
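+    # Attach the GraphQL span to the currently active span if there is one;
+    # otherwise start a standalone span.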
+    scope = sentry_sdk.get_current_scope()
+    if scope.span:
+        _graphql_span = scope.span.start_child(op=op, name=operation_name)
+    else:
+        _graphql_span = sentry_sdk.start_span(op=op, name=operation_name)
+
+    _graphql_span.set_data("graphql.document", source)
+    _graphql_span.set_data("graphql.operation.name", operation_name)
+    _graphql_span.set_data("graphql.operation.type", operation_type)
+
+    try:
+        yield
+    finally:
+        _graphql_span.finish()
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/__init__.py
new file mode 100644
index 00000000..d9dcdddb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/__init__.py
@@ -0,0 +1,151 @@
+from functools import wraps
+
+import grpc
+from grpc import Channel, Server, intercept_channel
+from grpc.aio import Channel as AsyncChannel
+from grpc.aio import Server as AsyncServer
+
+from sentry_sdk.integrations import Integration
+
+from .client import ClientInterceptor
+from .server import ServerInterceptor
+from .aio.server import ServerInterceptor as AsyncServerInterceptor
+from .aio.client import (
+    SentryUnaryUnaryClientInterceptor as AsyncUnaryUnaryClientInterceptor,
+)
+from .aio.client import (
+    SentryUnaryStreamClientInterceptor as AsyncUnaryStreamClientInterceptor,
+)
+
+from typing import TYPE_CHECKING, Any, Optional, Sequence
+
+# Hack to get new Python features working in older versions
+# without introducing a hard dependency on `typing_extensions`
+# from: https://stackoverflow.com/a/71944042/300572
+if TYPE_CHECKING:
+    from typing import ParamSpec, Callable
+else:
+    # Fake ParamSpec
+    class ParamSpec:
+        def __init__(self, _):
+            self.args = None
+            self.kwargs = None
+
+    # Callable[anything] will return None
+    class _Callable:
+        def __getitem__(self, _):
+            return None
+
+    # Make instances
+    Callable = _Callable()
+
+P = ParamSpec("P")
+
+
+def _wrap_channel_sync(func: Callable[P, Channel]) -> Callable[P, Channel]:
+    "Wrapper for synchronous secure and insecure channel."
+
+    @wraps(func)
+    def patched_channel(*args: Any, **kwargs: Any) -> Channel:
+        channel = func(*args, **kwargs)
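+        # _is_intercepted guards against adding the Sentry client interceptor
+        # more than once per process.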
+        if not ClientInterceptor._is_intercepted:
+            ClientInterceptor._is_intercepted = True
+            return intercept_channel(channel, ClientInterceptor())
+        else:
+            return channel
+
+    return patched_channel
+
+
+def _wrap_intercept_channel(func: Callable[P, Channel]) -> Callable[P, Channel]:
+    @wraps(func)
+    def patched_intercept_channel(
+        channel: Channel, *interceptors: grpc.ServerInterceptor
+    ) -> Channel:
+        if ClientInterceptor._is_intercepted:
+            interceptors = tuple(
+                interceptor
+                for interceptor in interceptors
+                if not isinstance(interceptor, ClientInterceptor)
+            )
+        return intercept_channel(channel, *interceptors)
+
+    return patched_intercept_channel  # type: ignore
+
+
+def _wrap_channel_async(func: Callable[P, AsyncChannel]) -> Callable[P, AsyncChannel]:
+    "Wrapper for asynchronous secure and insecure channel."
+
+    @wraps(func)
+    def patched_channel(  # type: ignore
+        *args: P.args,
+        interceptors: Optional[Sequence[grpc.aio.ClientInterceptor]] = None,
+        **kwargs: P.kwargs,
+    ) -> Channel:
+        sentry_interceptors = [
+            AsyncUnaryUnaryClientInterceptor(),
+            AsyncUnaryStreamClientInterceptor(),
+        ]
+        interceptors = [*sentry_interceptors, *(interceptors or [])]
+        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
+
+    return patched_channel  # type: ignore
+
+
+def _wrap_sync_server(func: Callable[P, Server]) -> Callable[P, Server]:
+    """Wrapper for synchronous server."""
+
+    @wraps(func)
+    def patched_server(  # type: ignore
+        *args: P.args,
+        interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None,
+        **kwargs: P.kwargs,
+    ) -> Server:
+        interceptors = [
+            interceptor
+            for interceptor in interceptors or []
+            if not isinstance(interceptor, ServerInterceptor)
+        ]
+        server_interceptor = ServerInterceptor()
+        interceptors = [server_interceptor, *(interceptors or [])]
+        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
+
+    return patched_server  # type: ignore
+
+
+def _wrap_async_server(func: Callable[P, AsyncServer]) -> Callable[P, AsyncServer]:
+    """Wrapper for asynchronous server."""
+
+    @wraps(func)
+    def patched_aio_server(  # type: ignore
+        *args: P.args,
+        interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None,
+        **kwargs: P.kwargs,
+    ) -> Server:
+        server_interceptor = AsyncServerInterceptor()
+        interceptors = (server_interceptor, *(interceptors or []))
+        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
+
+    return patched_aio_server  # type: ignore
+
+
+class GRPCIntegration(Integration):
+    identifier = "grpc"
+
+    @staticmethod
+    def setup_once() -> None:
+        import grpc
+
+        grpc.insecure_channel = _wrap_channel_sync(grpc.insecure_channel)
+        grpc.secure_channel = _wrap_channel_sync(grpc.secure_channel)
+        grpc.intercept_channel = _wrap_intercept_channel(grpc.intercept_channel)
+
+        grpc.aio.insecure_channel = _wrap_channel_async(grpc.aio.insecure_channel)
+        grpc.aio.secure_channel = _wrap_channel_async(grpc.aio.secure_channel)
+
+        grpc.server = _wrap_sync_server(grpc.server)
+        grpc.aio.server = _wrap_async_server(grpc.aio.server)
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/__init__.py
new file mode 100644
index 00000000..5b9e3b99
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/__init__.py
@@ -0,0 +1,7 @@
+from .server import ServerInterceptor
+from .client import ClientInterceptor
+
+__all__ = [
+    "ClientInterceptor",
+    "ServerInterceptor",
+]
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/client.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/client.py
new file mode 100644
index 00000000..ff3c2131
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/client.py
@@ -0,0 +1,94 @@
+from typing import Callable, Union, AsyncIterable, Any
+
+from grpc.aio import (
+    UnaryUnaryClientInterceptor,
+    UnaryStreamClientInterceptor,
+    ClientCallDetails,
+    UnaryUnaryCall,
+    UnaryStreamCall,
+    Metadata,
+)
+from google.protobuf.message import Message
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN
+
+
+class ClientInterceptor:
+    @staticmethod
+    def _update_client_call_details_metadata_from_scope(
+        client_call_details: ClientCallDetails,
+    ) -> ClientCallDetails:
+        if client_call_details.metadata is None:
+            client_call_details = client_call_details._replace(metadata=Metadata())
+        elif not isinstance(client_call_details.metadata, Metadata):
+            # This is a workaround for a GRPC bug, which was fixed in grpcio v1.60.0
+            # See https://github.com/grpc/grpc/issues/34298.
+            client_call_details = client_call_details._replace(
+                metadata=Metadata.from_tuple(client_call_details.metadata)
+            )
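+        # Attach Sentry's trace propagation headers (e.g. sentry-trace, baggage)
+        # to the outgoing metadata so the server side can continue the trace.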
+        for (
+            key,
+            value,
+        ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers():
+            client_call_details.metadata.add(key, value)
+        return client_call_details
+
+
+class SentryUnaryUnaryClientInterceptor(ClientInterceptor, UnaryUnaryClientInterceptor):  # type: ignore
+    async def intercept_unary_unary(
+        self,
+        continuation: Callable[[ClientCallDetails, Message], UnaryUnaryCall],
+        client_call_details: ClientCallDetails,
+        request: Message,
+    ) -> Union[UnaryUnaryCall, Message]:
+        method = client_call_details.method
+
+        with sentry_sdk.start_span(
+            op=OP.GRPC_CLIENT,
+            name="unary unary call to %s" % method.decode(),
+            origin=SPAN_ORIGIN,
+        ) as span:
+            span.set_data("type", "unary unary")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_scope(
+                client_call_details
+            )
+
+            response = await continuation(client_call_details, request)
+            status_code = await response.code()
+            span.set_data("code", status_code.name)
+
+            return response
+
+
+class SentryUnaryStreamClientInterceptor(
+    ClientInterceptor, UnaryStreamClientInterceptor  # type: ignore
+):
+    async def intercept_unary_stream(
+        self,
+        continuation: Callable[[ClientCallDetails, Message], UnaryStreamCall],
+        client_call_details: ClientCallDetails,
+        request: Message,
+    ) -> Union[AsyncIterable[Any], UnaryStreamCall]:
+        method = client_call_details.method
+
+        with sentry_sdk.start_span(
+            op=OP.GRPC_CLIENT,
+            name="unary stream call to %s" % method.decode(),
+            origin=SPAN_ORIGIN,
+        ) as span:
+            span.set_data("type", "unary stream")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_scope(
+                client_call_details
+            )
+
+            response = await continuation(client_call_details, request)
+            # status_code = await response.code()
+            # span.set_data("code", status_code)
+
+            return response
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/server.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/server.py
new file mode 100644
index 00000000..381c6310
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/aio/server.py
@@ -0,0 +1,100 @@
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN
+from sentry_sdk.tracing import Transaction, TransactionSource
+from sentry_sdk.utils import event_from_exception
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Awaitable, Callable
+    from typing import Any, Optional
+
+
+try:
+    import grpc
+    from grpc import HandlerCallDetails, RpcMethodHandler
+    from grpc.aio import AbortError, ServicerContext
+except ImportError:
+    raise DidNotEnable("grpcio is not installed")
+
+
+class ServerInterceptor(grpc.aio.ServerInterceptor):  # type: ignore
+    def __init__(self, find_name=None):
+        # type: (ServerInterceptor, Callable[[ServicerContext], str] | None) -> None
+        self._find_method_name = find_name or self._find_name
+
+        super().__init__()
+
+    async def intercept_service(self, continuation, handler_call_details):
+        # type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], HandlerCallDetails) -> Optional[Awaitable[RpcMethodHandler]]
+        self._handler_call_details = handler_call_details
+        handler = await continuation(handler_call_details)
+        if handler is None:
+            return None
+
+        if not handler.request_streaming and not handler.response_streaming:
+            handler_factory = grpc.unary_unary_rpc_method_handler
+
+            async def wrapped(request, context):
+                # type: (Any, ServicerContext) -> Any
+                name = self._find_method_name(context)
+                if not name:
+                    return await handler(request, context)
+
+                # If the invocation metadata contains no trace headers,
+                # continue_from_headers simply starts a new trace.
+                transaction = Transaction.continue_from_headers(
+                    dict(context.invocation_metadata()),
+                    op=OP.GRPC_SERVER,
+                    name=name,
+                    source=TransactionSource.CUSTOM,
+                    origin=SPAN_ORIGIN,
+                )
+
+                with sentry_sdk.start_transaction(transaction=transaction):
+                    try:
+                        return await handler.unary_unary(request, context)
+                    except AbortError:
+                        raise
+                    except Exception as exc:
+                        event, hint = event_from_exception(
+                            exc,
+                            mechanism={"type": "grpc", "handled": False},
+                        )
+                        sentry_sdk.capture_event(event, hint=hint)
+                        raise
+
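+        # The streaming variants below are wrapped as pass-throughs; only
+        # unary-unary calls start a Sentry transaction in this interceptor.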
+        elif not handler.request_streaming and handler.response_streaming:
+            handler_factory = grpc.unary_stream_rpc_method_handler
+
+            async def wrapped(request, context):  # type: ignore
+                # type: (Any, ServicerContext) -> Any
+                async for r in handler.unary_stream(request, context):
+                    yield r
+
+        elif handler.request_streaming and not handler.response_streaming:
+            handler_factory = grpc.stream_unary_rpc_method_handler
+
+            async def wrapped(request, context):
+                # type: (Any, ServicerContext) -> Any
+                response = handler.stream_unary(request, context)
+                return await response
+
+        elif handler.request_streaming and handler.response_streaming:
+            handler_factory = grpc.stream_stream_rpc_method_handler
+
+            async def wrapped(request, context):  # type: ignore
+                # type: (Any, ServicerContext) -> Any
+                async for r in handler.stream_stream(request, context):
+                    yield r
+
+        return handler_factory(
+            wrapped,
+            request_deserializer=handler.request_deserializer,
+            response_serializer=handler.response_serializer,
+        )
+
+    def _find_name(self, context):
+        # type: (ServicerContext) -> str
+        return self._handler_call_details.method
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/client.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/client.py
new file mode 100644
index 00000000..a5b4f9f5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/client.py
@@ -0,0 +1,92 @@
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Iterator, Iterable, Union
+
+try:
+    import grpc
+    from grpc import ClientCallDetails, Call
+    from grpc._interceptor import _UnaryOutcome
+    from grpc.aio._interceptor import UnaryStreamCall
+    from google.protobuf.message import Message
+except ImportError:
+    raise DidNotEnable("grpcio is not installed")
+
+
+class ClientInterceptor(
+    grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor  # type: ignore
+):
+    _is_intercepted = False
+
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome
+        method = client_call_details.method
+
+        with sentry_sdk.start_span(
+            op=OP.GRPC_CLIENT,
+            name="unary unary call to %s" % method,
+            origin=SPAN_ORIGIN,
+        ) as span:
+            span.set_data("type", "unary unary")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_scope(
+                client_call_details
+            )
+
+            response = continuation(client_call_details, request)
+            span.set_data("code", response.code().name)
+
+            return response
+
+    def intercept_unary_stream(self, continuation, client_call_details, request):
+        # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call]
+        method = client_call_details.method
+
+        with sentry_sdk.start_span(
+            op=OP.GRPC_CLIENT,
+            name="unary stream call to %s" % method,
+            origin=SPAN_ORIGIN,
+        ) as span:
+            span.set_data("type", "unary stream")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_scope(
+                client_call_details
+            )
+
+            response = continuation(
+                client_call_details, request
+            )  # type: UnaryStreamCall
+            # Setting code on unary-stream leads to execution getting stuck
+            # span.set_data("code", response.code().name)
+
+            return response
+
+    @staticmethod
+    def _update_client_call_details_metadata_from_scope(client_call_details):
+        # type: (ClientCallDetails) -> ClientCallDetails
+        metadata = (
+            list(client_call_details.metadata) if client_call_details.metadata else []
+        )
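+        # Append Sentry's trace propagation headers so the receiving service can
+        # continue the trace.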
+        for (
+            key,
+            value,
+        ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers():
+            metadata.append((key, value))
+
+        client_call_details = grpc._interceptor._ClientCallDetails(
+            method=client_call_details.method,
+            timeout=client_call_details.timeout,
+            metadata=metadata,
+            credentials=client_call_details.credentials,
+            wait_for_ready=client_call_details.wait_for_ready,
+            compression=client_call_details.compression,
+        )
+
+        return client_call_details
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/consts.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/consts.py
new file mode 100644
index 00000000..9fdb975c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/consts.py
@@ -0,0 +1 @@
+SPAN_ORIGIN = "auto.grpc.grpc"
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/server.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/server.py
new file mode 100644
index 00000000..0d2792d1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/grpc/server.py
@@ -0,0 +1,66 @@
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN
+from sentry_sdk.tracing import Transaction, TransactionSource
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Callable, Optional
+    from google.protobuf.message import Message
+
+try:
+    import grpc
+    from grpc import ServicerContext, HandlerCallDetails, RpcMethodHandler
+except ImportError:
+    raise DidNotEnable("grpcio is not installed")
+
+
+class ServerInterceptor(grpc.ServerInterceptor):  # type: ignore
+    def __init__(self, find_name=None):
+        # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None
+        self._find_method_name = find_name or ServerInterceptor._find_name
+
+        super().__init__()
+
+    def intercept_service(self, continuation, handler_call_details):
+        # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler
+        handler = continuation(handler_call_details)
+        if not handler or not handler.unary_unary:
+            return handler
+
+        def behavior(request, context):
+            # type: (Message, ServicerContext) -> Message
+            with sentry_sdk.isolation_scope():
+                name = self._find_method_name(context)
+
+                if name:
+                    metadata = dict(context.invocation_metadata())
+
+                    transaction = Transaction.continue_from_headers(
+                        metadata,
+                        op=OP.GRPC_SERVER,
+                        name=name,
+                        source=TransactionSource.CUSTOM,
+                        origin=SPAN_ORIGIN,
+                    )
+
+                    with sentry_sdk.start_transaction(transaction=transaction):
+                        try:
+                            return handler.unary_unary(request, context)
+                        except BaseException as e:
+                            raise e
+                else:
+                    return handler.unary_unary(request, context)
+
+        return grpc.unary_unary_rpc_method_handler(
+            behavior,
+            request_deserializer=handler.request_deserializer,
+            response_serializer=handler.response_serializer,
+        )
+
+    @staticmethod
+    def _find_name(context):
+        # type: (ServicerContext) -> str
+        return context._rpc_event.call_details.method.decode()
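Editor's note: a minimal sketch (not part of the vendored diff) of wiring these interceptors by hand. It assumes the client-side class shown above is exported as ClientInterceptor from sentry_sdk.integrations.grpc.client, as in the upstream SDK; in normal use, GRPCIntegration installs both interceptors automatically.

    from concurrent import futures

    import grpc
    import sentry_sdk
    from sentry_sdk.integrations.grpc.client import ClientInterceptor
    from sentry_sdk.integrations.grpc.server import ServerInterceptor

    sentry_sdk.init(traces_sample_rate=1.0)  # DSN taken from the SENTRY_DSN env var

    # Client side: wrap an existing channel so unary calls get GRPC_CLIENT spans and
    # carry sentry-trace/baggage metadata (see _update_client_call_details_metadata_from_scope).
    channel = grpc.intercept_channel(grpc.insecure_channel("localhost:50051"), ClientInterceptor())

    # Server side: run unary-unary handlers inside a transaction continued from incoming metadata.
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4), interceptors=[ServerInterceptor()])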
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/httpx.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/httpx.py
new file mode 100644
index 00000000..2ddd4448
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/httpx.py
@@ -0,0 +1,167 @@
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import BAGGAGE_HEADER_NAME
+from sentry_sdk.tracing_utils import Baggage, should_propagate_trace
+from sentry_sdk.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    logger,
+    parse_url,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import MutableMapping
+    from typing import Any
+
+
+try:
+    from httpx import AsyncClient, Client, Request, Response  # type: ignore
+except ImportError:
+    raise DidNotEnable("httpx is not installed")
+
+__all__ = ["HttpxIntegration"]
+
+
+class HttpxIntegration(Integration):
+    identifier = "httpx"
+    origin = f"auto.http.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        """
+        httpx has its own transport layer and can be customized when needed,
+        so we patch Client.send and AsyncClient.send to support both the synchronous and the async interface.
+        """
+        _install_httpx_client()
+        _install_httpx_async_client()
+
+
+def _install_httpx_client():
+    # type: () -> None
+    real_send = Client.send
+
+    @ensure_integration_enabled(HttpxIntegration, real_send)
+    def send(self, request, **kwargs):
+        # type: (Client, Request, **Any) -> Response
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(request.url), sanitize=False)
+
+        with sentry_sdk.start_span(
+            op=OP.HTTP_CLIENT,
+            name="%s %s"
+            % (
+                request.method,
+                parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE,
+            ),
+            origin=HttpxIntegration.origin,
+        ) as span:
+            span.set_data(SPANDATA.HTTP_METHOD, request.method)
+            if parsed_url is not None:
+                span.set_data("url", parsed_url.url)
+                span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+                span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+            if should_propagate_trace(sentry_sdk.get_client(), str(request.url)):
+                for (
+                    key,
+                    value,
+                ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers():
+                    logger.debug(
+                        "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                            key=key, value=value, url=request.url
+                        )
+                    )
+
+                    if key == BAGGAGE_HEADER_NAME:
+                        _add_sentry_baggage_to_headers(request.headers, value)
+                    else:
+                        request.headers[key] = value
+
+            rv = real_send(self, request, **kwargs)
+
+            span.set_http_status(rv.status_code)
+            span.set_data("reason", rv.reason_phrase)
+
+            return rv
+
+    Client.send = send
+
+
+def _install_httpx_async_client():
+    # type: () -> None
+    real_send = AsyncClient.send
+
+    async def send(self, request, **kwargs):
+        # type: (AsyncClient, Request, **Any) -> Response
+        if sentry_sdk.get_client().get_integration(HttpxIntegration) is None:
+            return await real_send(self, request, **kwargs)
+
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(request.url), sanitize=False)
+
+        with sentry_sdk.start_span(
+            op=OP.HTTP_CLIENT,
+            name="%s %s"
+            % (
+                request.method,
+                parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE,
+            ),
+            origin=HttpxIntegration.origin,
+        ) as span:
+            span.set_data(SPANDATA.HTTP_METHOD, request.method)
+            if parsed_url is not None:
+                span.set_data("url", parsed_url.url)
+                span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+                span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+            if should_propagate_trace(sentry_sdk.get_client(), str(request.url)):
+                for (
+                    key,
+                    value,
+                ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers():
+                    logger.debug(
+                        "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                            key=key, value=value, url=request.url
+                        )
+                    )
+                    if key == BAGGAGE_HEADER_NAME and request.headers.get(
+                        BAGGAGE_HEADER_NAME
+                    ):
+                        # do not overwrite any existing baggage, just append to it
+                        request.headers[key] += "," + value
+                    else:
+                        request.headers[key] = value
+
+            rv = await real_send(self, request, **kwargs)
+
+            span.set_http_status(rv.status_code)
+            span.set_data("reason", rv.reason_phrase)
+
+            return rv
+
+    AsyncClient.send = send
+
+
+def _add_sentry_baggage_to_headers(headers, sentry_baggage):
+    # type: (MutableMapping[str, str], str) -> None
+    """Add the Sentry baggage to the headers.
+
+    This function directly mutates the provided headers. The provided sentry_baggage
+    is appended to the existing baggage. If the baggage already contains Sentry items,
+    they are stripped out first.
+    """
+    existing_baggage = headers.get(BAGGAGE_HEADER_NAME, "")
+    stripped_existing_baggage = Baggage.strip_sentry_baggage(existing_baggage)
+
+    separator = "," if len(stripped_existing_baggage) > 0 else ""
+
+    headers[BAGGAGE_HEADER_NAME] = (
+        stripped_existing_baggage + separator + sentry_baggage
+    )
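Editor's note: a minimal usage sketch (not part of the diff). HttpxIntegration is listed explicitly here for clarity; the SDK will usually auto-enable it when httpx is importable.

    import httpx
    import sentry_sdk
    from sentry_sdk.integrations.httpx import HttpxIntegration

    sentry_sdk.init(traces_sample_rate=1.0, integrations=[HttpxIntegration()])

    with sentry_sdk.start_transaction(name="httpx-demo"):
        # Client.send is patched above, so this call records an http.client span and,
        # where should_propagate_trace() allows it, adds sentry-trace/baggage headers.
        httpx.get("https://example.com")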
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/huey.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/huey.py
new file mode 100644
index 00000000..f0aff4c0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/huey.py
@@ -0,0 +1,174 @@
+import sys
+from datetime import datetime
+
+import sentry_sdk
+from sentry_sdk.api import continue_trace, get_baggage, get_traceparent
+from sentry_sdk.consts import OP, SPANSTATUS
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+    TransactionSource,
+)
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    SENSITIVE_DATA_SUBSTITUTE,
+    reraise,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Optional, Union, TypeVar
+
+    from sentry_sdk._types import EventProcessor, Event, Hint
+    from sentry_sdk.utils import ExcInfo
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+try:
+    from huey.api import Huey, Result, ResultGroup, Task, PeriodicTask
+    from huey.exceptions import CancelExecution, RetryTask, TaskLockedException
+except ImportError:
+    raise DidNotEnable("Huey is not installed")
+
+
+HUEY_CONTROL_FLOW_EXCEPTIONS = (CancelExecution, RetryTask, TaskLockedException)
+
+
+class HueyIntegration(Integration):
+    identifier = "huey"
+    origin = f"auto.queue.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_enqueue()
+        patch_execute()
+
+
+def patch_enqueue():
+    # type: () -> None
+    old_enqueue = Huey.enqueue
+
+    @ensure_integration_enabled(HueyIntegration, old_enqueue)
+    def _sentry_enqueue(self, task):
+        # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]]
+        with sentry_sdk.start_span(
+            op=OP.QUEUE_SUBMIT_HUEY,
+            name=task.name,
+            origin=HueyIntegration.origin,
+        ):
+            if not isinstance(task, PeriodicTask):
+                # Attach trace propagation data to task kwargs. We do
+                # not do this for periodic tasks, as these don't
+                # really have an originating transaction.
+                task.kwargs["sentry_headers"] = {
+                    BAGGAGE_HEADER_NAME: get_baggage(),
+                    SENTRY_TRACE_HEADER_NAME: get_traceparent(),
+                }
+            return old_enqueue(self, task)
+
+    Huey.enqueue = _sentry_enqueue
+
+
+def _make_event_processor(task):
+    # type: (Any) -> EventProcessor
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        with capture_internal_exceptions():
+            tags = event.setdefault("tags", {})
+            tags["huey_task_id"] = task.id
+            tags["huey_task_retry"] = task.default_retries > task.retries
+            extra = event.setdefault("extra", {})
+            extra["huey-job"] = {
+                "task": task.name,
+                "args": (
+                    task.args
+                    if should_send_default_pii()
+                    else SENSITIVE_DATA_SUBSTITUTE
+                ),
+                "kwargs": (
+                    task.kwargs
+                    if should_send_default_pii()
+                    else SENSITIVE_DATA_SUBSTITUTE
+                ),
+                "retry": (task.default_retries or 0) - task.retries,
+            }
+
+        return event
+
+    return event_processor
+
+
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    scope = sentry_sdk.get_current_scope()
+
+    if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS:
+        scope.transaction.set_status(SPANSTATUS.ABORTED)
+        return
+
+    scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR)
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": HueyIntegration.identifier, "handled": False},
+    )
+    scope.capture_event(event, hint=hint)
+
+
+def _wrap_task_execute(func):
+    # type: (F) -> F
+
+    @ensure_integration_enabled(HueyIntegration, func)
+    def _sentry_execute(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        try:
+            result = func(*args, **kwargs)
+        except Exception:
+            exc_info = sys.exc_info()
+            _capture_exception(exc_info)
+            reraise(*exc_info)
+
+        return result
+
+    return _sentry_execute  # type: ignore
+
+
+def patch_execute():
+    # type: () -> None
+    old_execute = Huey._execute
+
+    @ensure_integration_enabled(HueyIntegration, old_execute)
+    def _sentry_execute(self, task, timestamp=None):
+        # type: (Huey, Task, Optional[datetime]) -> Any
+        with sentry_sdk.isolation_scope() as scope:
+            with capture_internal_exceptions():
+                scope._name = "huey"
+                scope.clear_breadcrumbs()
+                scope.add_event_processor(_make_event_processor(task))
+
+            sentry_headers = task.kwargs.pop("sentry_headers", None)
+
+            transaction = continue_trace(
+                sentry_headers or {},
+                name=task.name,
+                op=OP.QUEUE_TASK_HUEY,
+                source=TransactionSource.TASK,
+                origin=HueyIntegration.origin,
+            )
+            transaction.set_status(SPANSTATUS.OK)
+
+            if not getattr(task, "_sentry_is_patched", False):
+                task.execute = _wrap_task_execute(task.execute)
+                task._sentry_is_patched = True
+
+            with sentry_sdk.start_transaction(transaction):
+                return old_execute(self, task, timestamp)
+
+    Huey._execute = _sentry_execute
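Editor's note: a minimal sketch of the integration in use (not part of the diff); the SQLite-backed Huey and the task are purely illustrative.

    import sentry_sdk
    from huey import SqliteHuey
    from sentry_sdk.integrations.huey import HueyIntegration

    sentry_sdk.init(traces_sample_rate=1.0, integrations=[HueyIntegration()])

    huey = SqliteHuey(filename="/tmp/demo-huey.db")

    @huey.task()
    def add(a, b):
        return a + b

    # Enqueueing goes through the patched Huey.enqueue, which attaches sentry_headers
    # to the task kwargs; the consumer's patched Huey._execute continues the trace.
    add(1, 2)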
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/huggingface_hub.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/huggingface_hub.py
new file mode 100644
index 00000000..d09f6e21
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/huggingface_hub.py
@@ -0,0 +1,175 @@
+from functools import wraps
+
+from sentry_sdk import consts
+from sentry_sdk.ai.monitoring import record_token_usage
+from sentry_sdk.ai.utils import set_data_normalized
+from sentry_sdk.consts import SPANDATA
+
+from typing import Any, Iterable, Callable
+
+import sentry_sdk
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
+
+try:
+    import huggingface_hub.inference._client
+
+    from huggingface_hub import ChatCompletionStreamOutput, TextGenerationOutput
+except ImportError:
+    raise DidNotEnable("Huggingface not installed")
+
+
+class HuggingfaceHubIntegration(Integration):
+    identifier = "huggingface_hub"
+    origin = f"auto.ai.{identifier}"
+
+    def __init__(self, include_prompts=True):
+        # type: (HuggingfaceHubIntegration, bool) -> None
+        self.include_prompts = include_prompts
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        huggingface_hub.inference._client.InferenceClient.text_generation = (
+            _wrap_text_generation(
+                huggingface_hub.inference._client.InferenceClient.text_generation
+            )
+        )
+
+
+def _capture_exception(exc):
+    # type: (Any) -> None
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "huggingface_hub", "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _wrap_text_generation(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
+    @wraps(f)
+    def new_text_generation(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(HuggingfaceHubIntegration)
+        if integration is None:
+            return f(*args, **kwargs)
+
+        if "prompt" in kwargs:
+            prompt = kwargs["prompt"]
+        elif len(args) >= 2:
+            kwargs["prompt"] = args[1]
+            prompt = kwargs["prompt"]
+            args = (args[0],) + args[2:]
+        else:
+            # invalid call; defer to the original function so it surfaces its own error
+            return f(*args, **kwargs)
+
+        model = kwargs.get("model")
+        streaming = kwargs.get("stream")
+
+        span = sentry_sdk.start_span(
+            op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE,
+            name="Text Generation",
+            origin=HuggingfaceHubIntegration.origin,
+        )
+        span.__enter__()
+        try:
+            res = f(*args, **kwargs)
+        except Exception as e:
+            _capture_exception(e)
+            span.__exit__(None, None, None)
+            raise e from None
+
+        with capture_internal_exceptions():
+            if should_send_default_pii() and integration.include_prompts:
+                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompt)
+
+            set_data_normalized(span, SPANDATA.AI_MODEL_ID, model)
+            set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)
+
+            if isinstance(res, str):
+                if should_send_default_pii() and integration.include_prompts:
+                    set_data_normalized(
+                        span,
+                        "ai.responses",
+                        [res],
+                    )
+                span.__exit__(None, None, None)
+                return res
+
+            if isinstance(res, TextGenerationOutput):
+                if should_send_default_pii() and integration.include_prompts:
+                    set_data_normalized(
+                        span,
+                        "ai.responses",
+                        [res.generated_text],
+                    )
+                if res.details is not None and res.details.generated_tokens > 0:
+                    record_token_usage(span, total_tokens=res.details.generated_tokens)
+                span.__exit__(None, None, None)
+                return res
+
+            if not isinstance(res, Iterable):
+                # we only know how to deal with strings and iterables, ignore
+                set_data_normalized(span, "unknown_response", True)
+                span.__exit__(None, None, None)
+                return res
+
+            if kwargs.get("details", False):
+                # res is Iterable[TextGenerationStreamOutput]
+                def new_details_iterator():
+                    # type: () -> Iterable[ChatCompletionStreamOutput]
+                    with capture_internal_exceptions():
+                        tokens_used = 0
+                        data_buf: list[str] = []
+                        for x in res:
+                            if hasattr(x, "token") and hasattr(x.token, "text"):
+                                data_buf.append(x.token.text)
+                            if hasattr(x, "details") and hasattr(
+                                x.details, "generated_tokens"
+                            ):
+                                tokens_used = x.details.generated_tokens
+                            yield x
+                        if (
+                            len(data_buf) > 0
+                            and should_send_default_pii()
+                            and integration.include_prompts
+                        ):
+                            set_data_normalized(
+                                span, SPANDATA.AI_RESPONSES, "".join(data_buf)
+                            )
+                        if tokens_used > 0:
+                            record_token_usage(span, total_tokens=tokens_used)
+                    span.__exit__(None, None, None)
+
+                return new_details_iterator()
+            else:
+                # res is Iterable[str]
+
+                def new_iterator():
+                    # type: () -> Iterable[str]
+                    data_buf: list[str] = []
+                    with capture_internal_exceptions():
+                        for s in res:
+                            if isinstance(s, str):
+                                data_buf.append(s)
+                            yield s
+                        if (
+                            len(data_buf) > 0
+                            and should_send_default_pii()
+                            and integration.include_prompts
+                        ):
+                            set_data_normalized(
+                                span, SPANDATA.AI_RESPONSES, "".join(data_buf)
+                            )
+                        span.__exit__(None, None, None)
+
+                return new_iterator()
+
+    return new_text_generation
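Editor's note: an illustrative sketch (not part of the diff); the model name is only an example.

    import sentry_sdk
    from huggingface_hub import InferenceClient
    from sentry_sdk.integrations.huggingface_hub import HuggingfaceHubIntegration

    # include_prompts=False keeps prompts and generated text out of span data
    # even if send_default_pii is enabled.
    sentry_sdk.init(
        traces_sample_rate=1.0,
        send_default_pii=True,
        integrations=[HuggingfaceHubIntegration(include_prompts=False)],
    )

    client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta")
    with sentry_sdk.start_transaction(name="hf-demo"):
        # text_generation is wrapped above, so this call is recorded as an AI span
        # with model id, streaming flag and (when available) token usage.
        client.text_generation("Write a haiku about tracing.", max_new_tokens=32)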
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/langchain.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/langchain.py
new file mode 100644
index 00000000..431fc46b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/langchain.py
@@ -0,0 +1,465 @@
+from collections import OrderedDict
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk.ai.monitoring import set_ai_pipeline_name, record_token_usage
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.ai.utils import set_data_normalized
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import Span
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.utils import logger, capture_internal_exceptions
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, List, Callable, Dict, Union, Optional
+    from uuid import UUID
+
+try:
+    from langchain_core.messages import BaseMessage
+    from langchain_core.outputs import LLMResult
+    from langchain_core.callbacks import (
+        manager,
+        BaseCallbackHandler,
+    )
+    from langchain_core.agents import AgentAction, AgentFinish
+except ImportError:
+    raise DidNotEnable("langchain not installed")
+
+
+DATA_FIELDS = {
+    "temperature": SPANDATA.AI_TEMPERATURE,
+    "top_p": SPANDATA.AI_TOP_P,
+    "top_k": SPANDATA.AI_TOP_K,
+    "function_call": SPANDATA.AI_FUNCTION_CALL,
+    "tool_calls": SPANDATA.AI_TOOL_CALLS,
+    "tools": SPANDATA.AI_TOOLS,
+    "response_format": SPANDATA.AI_RESPONSE_FORMAT,
+    "logit_bias": SPANDATA.AI_LOGIT_BIAS,
+    "tags": SPANDATA.AI_TAGS,
+}
+
+# To avoid double collecting tokens, we do *not* measure
+# token counts for models for which we have an explicit integration
+NO_COLLECT_TOKEN_MODELS = [
+    "openai-chat",
+    "anthropic-chat",
+    "cohere-chat",
+    "huggingface_endpoint",
+]
+
+
+class LangchainIntegration(Integration):
+    identifier = "langchain"
+    origin = f"auto.ai.{identifier}"
+
+    # The maximum number of spans (e.g., LLM calls) that can be processed at the same time.
+    max_spans = 1024
+
+    def __init__(
+        self, include_prompts=True, max_spans=1024, tiktoken_encoding_name=None
+    ):
+        # type: (LangchainIntegration, bool, int, Optional[str]) -> None
+        self.include_prompts = include_prompts
+        self.max_spans = max_spans
+        self.tiktoken_encoding_name = tiktoken_encoding_name
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        manager._configure = _wrap_configure(manager._configure)
+
+
+class WatchedSpan:
+    span = None  # type: Span
+    num_completion_tokens = 0  # type: int
+    num_prompt_tokens = 0  # type: int
+    no_collect_tokens = False  # type: bool
+    children = []  # type: List[WatchedSpan]
+    is_pipeline = False  # type: bool
+
+    def __init__(self, span):
+        # type: (Span) -> None
+        self.span = span
+
+
+class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
+    """Base callback handler that can be used to handle callbacks from langchain."""
+
+    span_map = OrderedDict()  # type: OrderedDict[UUID, WatchedSpan]
+
+    max_span_map_size = 0
+
+    def __init__(self, max_span_map_size, include_prompts, tiktoken_encoding_name=None):
+        # type: (int, bool, Optional[str]) -> None
+        self.max_span_map_size = max_span_map_size
+        self.include_prompts = include_prompts
+
+        self.tiktoken_encoding = None
+        if tiktoken_encoding_name is not None:
+            import tiktoken  # type: ignore
+
+            self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name)
+
+    def count_tokens(self, s):
+        # type: (str) -> int
+        if self.tiktoken_encoding is not None:
+            return len(self.tiktoken_encoding.encode_ordinary(s))
+        return 0
+
+    def gc_span_map(self):
+        # type: () -> None
+
+        while len(self.span_map) > self.max_span_map_size:
+            run_id, watched_span = self.span_map.popitem(last=False)
+            self._exit_span(watched_span, run_id)
+
+    def _handle_error(self, run_id, error):
+        # type: (UUID, Any) -> None
+        if not run_id or run_id not in self.span_map:
+            return
+
+        span_data = self.span_map[run_id]
+        if not span_data:
+            return
+        sentry_sdk.capture_exception(error, span_data.span.scope)
+        span_data.span.__exit__(None, None, None)
+        del self.span_map[run_id]
+
+    def _normalize_langchain_message(self, message):
+        # type: (BaseMessage) -> Any
+        parsed = {"content": message.content, "role": message.type}
+        parsed.update(message.additional_kwargs)
+        return parsed
+
+    def _create_span(self, run_id, parent_id, **kwargs):
+        # type: (SentryLangchainCallback, UUID, Optional[Any], Any) -> WatchedSpan
+
+        watched_span = None  # type: Optional[WatchedSpan]
+        if parent_id:
+            parent_span = self.span_map.get(parent_id)  # type: Optional[WatchedSpan]
+            if parent_span:
+                watched_span = WatchedSpan(parent_span.span.start_child(**kwargs))
+                parent_span.children.append(watched_span)
+        if watched_span is None:
+            watched_span = WatchedSpan(sentry_sdk.start_span(**kwargs))
+
+        if kwargs.get("op", "").startswith("ai.pipeline."):
+            if kwargs.get("name"):
+                set_ai_pipeline_name(kwargs.get("name"))
+            watched_span.is_pipeline = True
+
+        watched_span.span.__enter__()
+        self.span_map[run_id] = watched_span
+        self.gc_span_map()
+        return watched_span
+
+    def _exit_span(self, span_data, run_id):
+        # type: (SentryLangchainCallback, WatchedSpan, UUID) -> None
+
+        if span_data.is_pipeline:
+            set_ai_pipeline_name(None)
+
+        span_data.span.__exit__(None, None, None)
+        del self.span_map[run_id]
+
+    def on_llm_start(
+        self,
+        serialized,
+        prompts,
+        *,
+        run_id,
+        tags=None,
+        parent_run_id=None,
+        metadata=None,
+        **kwargs,
+    ):
+        # type: (SentryLangchainCallback, Dict[str, Any], List[str], UUID, Optional[List[str]], Optional[UUID], Optional[Dict[str, Any]], Any) -> Any
+        """Run when LLM starts running."""
+        with capture_internal_exceptions():
+            if not run_id:
+                return
+            all_params = kwargs.get("invocation_params", {})
+            all_params.update(serialized.get("kwargs", {}))
+            watched_span = self._create_span(
+                run_id,
+                kwargs.get("parent_run_id"),
+                op=OP.LANGCHAIN_RUN,
+                name=kwargs.get("name") or "Langchain LLM call",
+                origin=LangchainIntegration.origin,
+            )
+            span = watched_span.span
+            if should_send_default_pii() and self.include_prompts:
+                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompts)
+            for k, v in DATA_FIELDS.items():
+                if k in all_params:
+                    set_data_normalized(span, v, all_params[k])
+
+    def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any
+        """Run when Chat Model starts running."""
+        with capture_internal_exceptions():
+            if not run_id:
+                return
+            all_params = kwargs.get("invocation_params", {})
+            all_params.update(serialized.get("kwargs", {}))
+            watched_span = self._create_span(
+                run_id,
+                kwargs.get("parent_run_id"),
+                op=OP.LANGCHAIN_CHAT_COMPLETIONS_CREATE,
+                name=kwargs.get("name") or "Langchain Chat Model",
+                origin=LangchainIntegration.origin,
+            )
+            span = watched_span.span
+            model = all_params.get(
+                "model", all_params.get("model_name", all_params.get("model_id"))
+            )
+            watched_span.no_collect_tokens = any(
+                x in all_params.get("_type", "") for x in NO_COLLECT_TOKEN_MODELS
+            )
+
+            if not model and "anthropic" in all_params.get("_type", ""):
+                model = "claude-2"
+            if model:
+                span.set_data(SPANDATA.AI_MODEL_ID, model)
+            if should_send_default_pii() and self.include_prompts:
+                set_data_normalized(
+                    span,
+                    SPANDATA.AI_INPUT_MESSAGES,
+                    [
+                        [self._normalize_langchain_message(x) for x in list_]
+                        for list_ in messages
+                    ],
+                )
+            for k, v in DATA_FIELDS.items():
+                if k in all_params:
+                    set_data_normalized(span, v, all_params[k])
+            if not watched_span.no_collect_tokens:
+                for list_ in messages:
+                    for message in list_:
+                        self.span_map[run_id].num_prompt_tokens += self.count_tokens(
+                            message.content
+                        ) + self.count_tokens(message.type)
+
+    def on_llm_new_token(self, token, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, str, UUID, Any) -> Any
+        """Run on new LLM token. Only available when streaming is enabled."""
+        with capture_internal_exceptions():
+            if not run_id or run_id not in self.span_map:
+                return
+            span_data = self.span_map[run_id]
+            if not span_data or span_data.no_collect_tokens:
+                return
+            span_data.num_completion_tokens += self.count_tokens(token)
+
+    def on_llm_end(self, response, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any
+        """Run when LLM ends running."""
+        with capture_internal_exceptions():
+            if not run_id:
+                return
+
+            token_usage = (
+                response.llm_output.get("token_usage") if response.llm_output else None
+            )
+
+            span_data = self.span_map[run_id]
+            if not span_data:
+                return
+
+            if should_send_default_pii() and self.include_prompts:
+                set_data_normalized(
+                    span_data.span,
+                    SPANDATA.AI_RESPONSES,
+                    [[x.text for x in list_] for list_ in response.generations],
+                )
+
+            if not span_data.no_collect_tokens:
+                if token_usage:
+                    record_token_usage(
+                        span_data.span,
+                        token_usage.get("prompt_tokens"),
+                        token_usage.get("completion_tokens"),
+                        token_usage.get("total_tokens"),
+                    )
+                else:
+                    record_token_usage(
+                        span_data.span,
+                        span_data.num_prompt_tokens,
+                        span_data.num_completion_tokens,
+                    )
+
+            self._exit_span(span_data, run_id)
+
+    def on_llm_error(self, error, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
+        """Run when LLM errors."""
+        with capture_internal_exceptions():
+            self._handle_error(run_id, error)
+
+    def on_chain_start(self, serialized, inputs, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, Dict[str, Any], Dict[str, Any], UUID, Any) -> Any
+        """Run when chain starts running."""
+        with capture_internal_exceptions():
+            if not run_id:
+                return
+            watched_span = self._create_span(
+                run_id,
+                kwargs.get("parent_run_id"),
+                op=(
+                    OP.LANGCHAIN_RUN
+                    if kwargs.get("parent_run_id") is not None
+                    else OP.LANGCHAIN_PIPELINE
+                ),
+                name=kwargs.get("name") or "Chain execution",
+                origin=LangchainIntegration.origin,
+            )
+            metadata = kwargs.get("metadata")
+            if metadata:
+                set_data_normalized(watched_span.span, SPANDATA.AI_METADATA, metadata)
+
+    def on_chain_end(self, outputs, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, Dict[str, Any], UUID, Any) -> Any
+        """Run when chain ends running."""
+        with capture_internal_exceptions():
+            if not run_id or run_id not in self.span_map:
+                return
+
+            span_data = self.span_map[run_id]
+            if not span_data:
+                return
+            self._exit_span(span_data, run_id)
+
+    def on_chain_error(self, error, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
+        """Run when chain errors."""
+        self._handle_error(run_id, error)
+
+    def on_agent_action(self, action, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any
+        with capture_internal_exceptions():
+            if not run_id:
+                return
+            watched_span = self._create_span(
+                run_id,
+                kwargs.get("parent_run_id"),
+                op=OP.LANGCHAIN_AGENT,
+                name=action.tool or "AI tool usage",
+                origin=LangchainIntegration.origin,
+            )
+            if action.tool_input and should_send_default_pii() and self.include_prompts:
+                set_data_normalized(
+                    watched_span.span, SPANDATA.AI_INPUT_MESSAGES, action.tool_input
+                )
+
+    def on_agent_finish(self, finish, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, AgentFinish, UUID, Any) -> Any
+        with capture_internal_exceptions():
+            if not run_id:
+                return
+
+            span_data = self.span_map[run_id]
+            if not span_data:
+                return
+            if should_send_default_pii() and self.include_prompts:
+                set_data_normalized(
+                    span_data.span, SPANDATA.AI_RESPONSES, finish.return_values.items()
+                )
+            self._exit_span(span_data, run_id)
+
+    def on_tool_start(self, serialized, input_str, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, Dict[str, Any], str, UUID, Any) -> Any
+        """Run when tool starts running."""
+        with capture_internal_exceptions():
+            if not run_id:
+                return
+            watched_span = self._create_span(
+                run_id,
+                kwargs.get("parent_run_id"),
+                op=OP.LANGCHAIN_TOOL,
+                name=serialized.get("name") or kwargs.get("name") or "AI tool usage",
+                origin=LangchainIntegration.origin,
+            )
+            if should_send_default_pii() and self.include_prompts:
+                set_data_normalized(
+                    watched_span.span,
+                    SPANDATA.AI_INPUT_MESSAGES,
+                    kwargs.get("inputs", [input_str]),
+                )
+                if kwargs.get("metadata"):
+                    set_data_normalized(
+                        watched_span.span, SPANDATA.AI_METADATA, kwargs.get("metadata")
+                    )
+
+    def on_tool_end(self, output, *, run_id, **kwargs):
+        # type: (SentryLangchainCallback, str, UUID, Any) -> Any
+        """Run when tool ends running."""
+        with capture_internal_exceptions():
+            if not run_id or run_id not in self.span_map:
+                return
+
+            span_data = self.span_map[run_id]
+            if not span_data:
+                return
+            if should_send_default_pii() and self.include_prompts:
+                set_data_normalized(span_data.span, SPANDATA.AI_RESPONSES, output)
+            self._exit_span(span_data, run_id)
+
+    def on_tool_error(self, error, *args, run_id, **kwargs):
+        # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
+        """Run when tool errors."""
+        self._handle_error(run_id, error)
+
+
+def _wrap_configure(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
+
+    @wraps(f)
+    def new_configure(*args, **kwargs):
+        # type: (Any, Any) -> Any
+
+        integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
+        if integration is None:
+            return f(*args, **kwargs)
+
+        with capture_internal_exceptions():
+            new_callbacks = []  # type: List[BaseCallbackHandler]
+            if "local_callbacks" in kwargs:
+                existing_callbacks = kwargs["local_callbacks"]
+                kwargs["local_callbacks"] = new_callbacks
+            elif len(args) > 2:
+                existing_callbacks = args[2]
+                args = (
+                    args[0],
+                    args[1],
+                    new_callbacks,
+                ) + args[3:]
+            else:
+                existing_callbacks = []
+
+            if existing_callbacks:
+                if isinstance(existing_callbacks, list):
+                    for cb in existing_callbacks:
+                        new_callbacks.append(cb)
+                elif isinstance(existing_callbacks, BaseCallbackHandler):
+                    new_callbacks.append(existing_callbacks)
+                else:
+                    logger.debug("Unknown callback type: %s", existing_callbacks)
+
+            already_added = False
+            for callback in new_callbacks:
+                if isinstance(callback, SentryLangchainCallback):
+                    already_added = True
+
+            if not already_added:
+                new_callbacks.append(
+                    SentryLangchainCallback(
+                        integration.max_spans,
+                        integration.include_prompts,
+                        integration.tiktoken_encoding_name,
+                    )
+                )
+        return f(*args, **kwargs)
+
+    return new_configure
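Editor's note: a minimal configuration sketch (not part of the diff). Once manager._configure is wrapped, any chain or LLM run picks up SentryLangchainCallback without further code changes.

    import sentry_sdk
    from sentry_sdk.integrations.langchain import LangchainIntegration

    # tiktoken_encoding_name is optional; when set, the callback estimates token
    # counts itself for models that have no dedicated Sentry integration.
    sentry_sdk.init(
        traces_sample_rate=1.0,
        integrations=[
            LangchainIntegration(
                include_prompts=True,
                max_spans=512,
                tiktoken_encoding_name="cl100k_base",
            )
        ],
    )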
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/launchdarkly.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/launchdarkly.py
new file mode 100644
index 00000000..cb9e9114
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/launchdarkly.py
@@ -0,0 +1,62 @@
+from typing import TYPE_CHECKING
+import sentry_sdk
+
+from sentry_sdk.integrations import DidNotEnable, Integration
+
+try:
+    import ldclient
+    from ldclient.hook import Hook, Metadata
+
+    if TYPE_CHECKING:
+        from ldclient import LDClient
+        from ldclient.hook import EvaluationSeriesContext
+        from ldclient.evaluation import EvaluationDetail
+
+        from typing import Any
+except ImportError:
+    raise DidNotEnable("LaunchDarkly is not installed")
+
+
+class LaunchDarklyIntegration(Integration):
+    identifier = "launchdarkly"
+
+    def __init__(self, ld_client=None):
+        # type: (LDClient | None) -> None
+        """
+        :param ld_client: An initialized LDClient instance. If a client is not provided, this
+            integration will attempt to use the shared global instance.
+        """
+        try:
+            client = ld_client or ldclient.get()
+        except Exception as exc:
+            raise DidNotEnable("Error getting LaunchDarkly client. " + repr(exc))
+
+        if not client.is_initialized():
+            raise DidNotEnable("LaunchDarkly client is not initialized.")
+
+        # Register the flag collection hook with the LD client.
+        client.add_hook(LaunchDarklyHook())
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        pass
+
+
+class LaunchDarklyHook(Hook):
+
+    @property
+    def metadata(self):
+        # type: () -> Metadata
+        return Metadata(name="sentry-flag-auditor")
+
+    def after_evaluation(self, series_context, data, detail):
+        # type: (EvaluationSeriesContext, dict[Any, Any], EvaluationDetail) -> dict[Any, Any]
+        if isinstance(detail.value, bool):
+            flags = sentry_sdk.get_current_scope().flags
+            flags.set(series_context.key, detail.value)
+        return data
+
+    def before_evaluation(self, series_context, data):
+        # type: (EvaluationSeriesContext, dict[Any, Any]) -> dict[Any, Any]
+        return data  # No-op.
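Editor's note: an illustrative setup sketch (not part of the diff); the SDK key is a placeholder.

    import ldclient
    from ldclient.config import Config

    import sentry_sdk
    from sentry_sdk.integrations.launchdarkly import LaunchDarklyIntegration

    # The LD client must already be initialized, otherwise __init__ above raises DidNotEnable.
    ldclient.set_config(Config("sdk-key-placeholder"))

    sentry_sdk.init(integrations=[LaunchDarklyIntegration(ld_client=ldclient.get())])
    # After this, every boolean flag evaluation is mirrored into the current scope's flags.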
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/litestar.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/litestar.py
new file mode 100644
index 00000000..5f0b32b0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/litestar.py
@@ -0,0 +1,306 @@
+from collections.abc import Set
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import (
+    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
+    DidNotEnable,
+    Integration,
+)
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import TransactionSource, SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    ensure_integration_enabled,
+    event_from_exception,
+    transaction_from_function,
+)
+
+try:
+    from litestar import Request, Litestar  # type: ignore
+    from litestar.handlers.base import BaseRouteHandler  # type: ignore
+    from litestar.middleware import DefineMiddleware  # type: ignore
+    from litestar.routes.http import HTTPRoute  # type: ignore
+    from litestar.data_extractors import ConnectionDataExtractor  # type: ignore
+    from litestar.exceptions import HTTPException  # type: ignore
+except ImportError:
+    raise DidNotEnable("Litestar is not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Optional, Union
+    from litestar.types.asgi_types import ASGIApp  # type: ignore
+    from litestar.types import (  # type: ignore
+        HTTPReceiveMessage,
+        HTTPScope,
+        Message,
+        Middleware,
+        Receive,
+        Scope as LitestarScope,
+        Send,
+        WebSocketReceiveMessage,
+    )
+    from litestar.middleware import MiddlewareProtocol
+    from sentry_sdk._types import Event, Hint
+
+_DEFAULT_TRANSACTION_NAME = "generic Litestar request"
+
+
+class LitestarIntegration(Integration):
+    identifier = "litestar"
+    origin = f"auto.http.{identifier}"
+
+    def __init__(
+        self,
+        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
+    ) -> None:
+        self.failed_request_status_codes = failed_request_status_codes
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_app_init()
+        patch_middlewares()
+        patch_http_route_handle()
+
+        # The following line follows the pattern found in other integrations such as `DjangoIntegration.setup_once`.
+        # The Litestar `ExceptionHandlerMiddleware.__call__` catches exceptions and does the following
+        # (among other things):
+        #   1. Logs at least some of them (such as 500s) as errors
+        #   2. Calls after_exception hooks
+        # The `LitestarIntegration` provides an after_exception hook (see `patch_app_init` below) that creates a Sentry
+        # event from the exception; this hook ends up being called during step 2 above. However, the Sentry
+        # `LoggingIntegration` will by default create a Sentry event from the error logs made in step 1 unless we
+        # prevent it from doing so.
+        ignore_logger("litestar")
+
+
+class SentryLitestarASGIMiddleware(SentryAsgiMiddleware):
+    def __init__(self, app, span_origin=LitestarIntegration.origin):
+        # type: (ASGIApp, str) -> None
+
+        super().__init__(
+            app=app,
+            unsafe_context_data=False,
+            transaction_style="endpoint",
+            mechanism_type="asgi",
+            span_origin=span_origin,
+        )
+
+
+def patch_app_init():
+    # type: () -> None
+    """
+    Replaces the Litestar class's `__init__` function in order to inject `after_exception` handlers and set the
+    `SentryLitestarASGIMiddleware` as the outermost middleware in the stack.
+    See:
+    - https://docs.litestar.dev/2/usage/applications.html#after-exception
+    - https://docs.litestar.dev/2/usage/middleware/using-middleware.html
+    """
+    old__init__ = Litestar.__init__
+
+    @ensure_integration_enabled(LitestarIntegration, old__init__)
+    def injection_wrapper(self, *args, **kwargs):
+        # type: (Litestar, *Any, **Any) -> None
+        kwargs["after_exception"] = [
+            exception_handler,
+            *(kwargs.get("after_exception") or []),
+        ]
+
+        SentryLitestarASGIMiddleware.__call__ = SentryLitestarASGIMiddleware._run_asgi3  # type: ignore
+        middleware = kwargs.get("middleware") or []
+        kwargs["middleware"] = [SentryLitestarASGIMiddleware, *middleware]
+        old__init__(self, *args, **kwargs)
+
+    Litestar.__init__ = injection_wrapper
+
+
+def patch_middlewares():
+    # type: () -> None
+    old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware
+
+    @ensure_integration_enabled(LitestarIntegration, old_resolve_middleware_stack)
+    def resolve_middleware_wrapper(self):
+        # type: (BaseRouteHandler) -> list[Middleware]
+        return [
+            enable_span_for_middleware(middleware)
+            for middleware in old_resolve_middleware_stack(self)
+        ]
+
+    BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper
+
+
+def enable_span_for_middleware(middleware):
+    # type: (Middleware) -> Middleware
+    if (
+        not hasattr(middleware, "__call__")  # noqa: B004
+        or middleware is SentryLitestarASGIMiddleware
+    ):
+        return middleware
+
+    if isinstance(middleware, DefineMiddleware):
+        old_call = middleware.middleware.__call__  # type: ASGIApp
+    else:
+        old_call = middleware.__call__
+
+    async def _create_span_call(self, scope, receive, send):
+        # type: (MiddlewareProtocol, LitestarScope, Receive, Send) -> None
+        if sentry_sdk.get_client().get_integration(LitestarIntegration) is None:
+            return await old_call(self, scope, receive, send)
+
+        middleware_name = self.__class__.__name__
+        with sentry_sdk.start_span(
+            op=OP.MIDDLEWARE_LITESTAR,
+            name=middleware_name,
+            origin=LitestarIntegration.origin,
+        ) as middleware_span:
+            middleware_span.set_tag("litestar.middleware_name", middleware_name)
+
+            # Creating spans for the "receive" callback
+            async def _sentry_receive(*args, **kwargs):
+                # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage]
+                if sentry_sdk.get_client().get_integration(LitestarIntegration) is None:
+                    return await receive(*args, **kwargs)
+                with sentry_sdk.start_span(
+                    op=OP.MIDDLEWARE_LITESTAR_RECEIVE,
+                    name=getattr(receive, "__qualname__", str(receive)),
+                    origin=LitestarIntegration.origin,
+                ) as span:
+                    span.set_tag("litestar.middleware_name", middleware_name)
+                    return await receive(*args, **kwargs)
+
+            receive_name = getattr(receive, "__name__", str(receive))
+            receive_patched = receive_name == "_sentry_receive"
+            new_receive = _sentry_receive if not receive_patched else receive
+
+            # Creating spans for the "send" callback
+            async def _sentry_send(message):
+                # type: (Message) -> None
+                if sentry_sdk.get_client().get_integration(LitestarIntegration) is None:
+                    return await send(message)
+                with sentry_sdk.start_span(
+                    op=OP.MIDDLEWARE_LITESTAR_SEND,
+                    name=getattr(send, "__qualname__", str(send)),
+                    origin=LitestarIntegration.origin,
+                ) as span:
+                    span.set_tag("litestar.middleware_name", middleware_name)
+                    return await send(message)
+
+            send_name = getattr(send, "__name__", str(send))
+            send_patched = send_name == "_sentry_send"
+            new_send = _sentry_send if not send_patched else send
+
+            return await old_call(self, scope, new_receive, new_send)
+
+    not_yet_patched = old_call.__name__ not in ["_create_span_call"]
+
+    if not_yet_patched:
+        if isinstance(middleware, DefineMiddleware):
+            middleware.middleware.__call__ = _create_span_call
+        else:
+            middleware.__call__ = _create_span_call
+
+    return middleware
+
+
+def patch_http_route_handle():
+    # type: () -> None
+    old_handle = HTTPRoute.handle
+
+    async def handle_wrapper(self, scope, receive, send):
+        # type: (HTTPRoute, HTTPScope, Receive, Send) -> None
+        if sentry_sdk.get_client().get_integration(LitestarIntegration) is None:
+            return await old_handle(self, scope, receive, send)
+
+        sentry_scope = sentry_sdk.get_isolation_scope()
+        request = scope["app"].request_class(
+            scope=scope, receive=receive, send=send
+        )  # type: Request[Any, Any]
+        extracted_request_data = ConnectionDataExtractor(
+            parse_body=True, parse_query=True
+        )(request)
+        body = extracted_request_data.pop("body")
+
+        request_data = await body
+
+        def event_processor(event, _):
+            # type: (Event, Hint) -> Event
+            route_handler = scope.get("route_handler")
+
+            request_info = event.get("request", {})
+            request_info["content_length"] = len(scope.get("_body", b""))
+            if should_send_default_pii():
+                request_info["cookies"] = extracted_request_data["cookies"]
+            if request_data is not None:
+                request_info["data"] = request_data
+
+            func = None
+            if route_handler.name is not None:
+                tx_name = route_handler.name
+            # Accounts for use of type `Ref` in earlier versions of litestar without the need to reference it as a type
+            elif hasattr(route_handler.fn, "value"):
+                func = route_handler.fn.value
+            else:
+                func = route_handler.fn
+            if func is not None:
+                tx_name = transaction_from_function(func)
+
+            tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
+
+            if not tx_name:
+                tx_name = _DEFAULT_TRANSACTION_NAME
+                tx_info = {"source": TransactionSource.ROUTE}
+
+            event.update(
+                {
+                    "request": request_info,
+                    "transaction": tx_name,
+                    "transaction_info": tx_info,
+                }
+            )
+            return event
+
+        sentry_scope._name = LitestarIntegration.identifier
+        sentry_scope.add_event_processor(event_processor)
+
+        return await old_handle(self, scope, receive, send)
+
+    HTTPRoute.handle = handle_wrapper
+
+
+def retrieve_user_from_scope(scope):
+    # type: (LitestarScope) -> Optional[dict[str, Any]]
+    scope_user = scope.get("user")
+    if isinstance(scope_user, dict):
+        return scope_user
+    if hasattr(scope_user, "asdict"):  # dataclasses
+        return scope_user.asdict()
+
+    return None
+
+
+@ensure_integration_enabled(LitestarIntegration)
+def exception_handler(exc, scope):
+    # type: (Exception, LitestarScope) -> None
+    user_info = None  # type: Optional[dict[str, Any]]
+    if should_send_default_pii():
+        user_info = retrieve_user_from_scope(scope)
+    if user_info and isinstance(user_info, dict):
+        sentry_scope = sentry_sdk.get_isolation_scope()
+        sentry_scope.set_user(user_info)
+
+    if isinstance(exc, HTTPException):
+        integration = sentry_sdk.get_client().get_integration(LitestarIntegration)
+        if (
+            integration is not None
+            and exc.status_code not in integration.failed_request_status_codes
+        ):
+            return
+
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": LitestarIntegration.identifier, "handled": False},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
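Editor's note: a minimal application sketch (not part of the diff), showing that no explicit middleware wiring is needed once the integration is active.

    import sentry_sdk
    from litestar import Litestar, get
    from sentry_sdk.integrations.litestar import LitestarIntegration

    sentry_sdk.init(
        traces_sample_rate=1.0,
        integrations=[LitestarIntegration(failed_request_status_codes={500, 503})],
    )

    @get("/ping")
    async def ping() -> str:
        return "pong"

    # Litestar.__init__ is patched above, so the Sentry ASGI middleware and the
    # after_exception handler are injected into this app automatically.
    app = Litestar(route_handlers=[ping])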
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/logging.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/logging.py
new file mode 100644
index 00000000..3777381b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/logging.py
@@ -0,0 +1,298 @@
+import logging
+from datetime import datetime, timezone
+from fnmatch import fnmatch
+
+import sentry_sdk
+from sentry_sdk.utils import (
+    to_string,
+    event_from_exception,
+    current_stacktrace,
+    capture_internal_exceptions,
+)
+from sentry_sdk.integrations import Integration
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import MutableMapping
+    from logging import LogRecord
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+
+DEFAULT_LEVEL = logging.INFO
+DEFAULT_EVENT_LEVEL = logging.ERROR
+LOGGING_TO_EVENT_LEVEL = {
+    logging.NOTSET: "notset",
+    logging.DEBUG: "debug",
+    logging.INFO: "info",
+    logging.WARN: "warning",  # WARN is the same as WARNING
+    logging.WARNING: "warning",
+    logging.ERROR: "error",
+    logging.FATAL: "fatal",
+    logging.CRITICAL: "fatal",  # CRITICAL is the same as FATAL
+}
+
+# Capturing events from those loggers causes recursion errors. We cannot allow
+# the user to unconditionally create events from those loggers under any
+# circumstances.
+#
+# Note: Ignoring by logger name here is better than mucking with thread-locals.
+# We do not necessarily know whether thread-locals work 100% correctly in the user's environment.
+_IGNORED_LOGGERS = set(
+    ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"]
+)
+
+
+def ignore_logger(
+    name,  # type: str
+):
+    # type: (...) -> None
+    """This disables recording (both in breadcrumbs and as events) calls to
+    a logger of a specific name.  Among other uses, many of our integrations
+    use this to prevent their actions being recorded as breadcrumbs. Exposed
+    to users as a way to quiet spammy loggers.
+
+    :param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``).
+    """
+    _IGNORED_LOGGERS.add(name)
+
+
+class LoggingIntegration(Integration):
+    identifier = "logging"
+
+    def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL):
+        # type: (Optional[int], Optional[int]) -> None
+        self._handler = None
+        self._breadcrumb_handler = None
+
+        if level is not None:
+            self._breadcrumb_handler = BreadcrumbHandler(level=level)
+
+        if event_level is not None:
+            self._handler = EventHandler(level=event_level)
+
+    def _handle_record(self, record):
+        # type: (LogRecord) -> None
+        if self._handler is not None and record.levelno >= self._handler.level:
+            self._handler.handle(record)
+
+        if (
+            self._breadcrumb_handler is not None
+            and record.levelno >= self._breadcrumb_handler.level
+        ):
+            self._breadcrumb_handler.handle(record)
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        old_callhandlers = logging.Logger.callHandlers
+
+        def sentry_patched_callhandlers(self, record):
+            # type: (Any, LogRecord) -> Any
+            # keeping a local reference because the
+            # global might be discarded on shutdown
+            ignored_loggers = _IGNORED_LOGGERS
+
+            try:
+                return old_callhandlers(self, record)
+            finally:
+                # This check is done twice: once here, before we even resolve
+                # the integration, and again in the handlers. Otherwise we would
+                # have a high chance of running into a recursion error when the
+                # integration is resolved (and it would also be slower).
+                if ignored_loggers is not None and record.name not in ignored_loggers:
+                    integration = sentry_sdk.get_client().get_integration(
+                        LoggingIntegration
+                    )
+                    if integration is not None:
+                        integration._handle_record(record)
+
+        logging.Logger.callHandlers = sentry_patched_callhandlers  # type: ignore
+
+
+class _BaseHandler(logging.Handler):
+    COMMON_RECORD_ATTRS = frozenset(
+        (
+            "args",
+            "created",
+            "exc_info",
+            "exc_text",
+            "filename",
+            "funcName",
+            "levelname",
+            "levelno",
+            "linenno",
+            "lineno",
+            "message",
+            "module",
+            "msecs",
+            "msg",
+            "name",
+            "pathname",
+            "process",
+            "processName",
+            "relativeCreated",
+            "stack",
+            "tags",
+            "taskName",
+            "thread",
+            "threadName",
+            "stack_info",
+        )
+    )
+
+    def _can_record(self, record):
+        # type: (LogRecord) -> bool
+        """Prevents ignored loggers from recording"""
+        for logger in _IGNORED_LOGGERS:
+            if fnmatch(record.name, logger):
+                return False
+        return True
+
+    def _logging_to_event_level(self, record):
+        # type: (LogRecord) -> str
+        return LOGGING_TO_EVENT_LEVEL.get(
+            record.levelno, record.levelname.lower() if record.levelname else ""
+        )
+
+    def _extra_from_record(self, record):
+        # type: (LogRecord) -> MutableMapping[str, object]
+        return {
+            k: v
+            for k, v in vars(record).items()
+            if k not in self.COMMON_RECORD_ATTRS
+            and (not isinstance(k, str) or not k.startswith("_"))
+        }
+
+
+class EventHandler(_BaseHandler):
+    """
+    A logging handler that emits Sentry events for each log record
+
+    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
+    """
+
+    def emit(self, record):
+        # type: (LogRecord) -> Any
+        with capture_internal_exceptions():
+            self.format(record)
+            return self._emit(record)
+
+    def _emit(self, record):
+        # type: (LogRecord) -> None
+        if not self._can_record(record):
+            return
+
+        client = sentry_sdk.get_client()
+        if not client.is_active():
+            return
+
+        client_options = client.options
+
+        # exc_info might be None or (None, None, None)
+        #
+        # exc_info may also be any falsy value due to Python stdlib being
+        # liberal with what it receives and Celery's billiard being "liberal"
+        # with what it sends. See
+        # https://github.com/getsentry/sentry-python/issues/904
+        if record.exc_info and record.exc_info[0] is not None:
+            event, hint = event_from_exception(
+                record.exc_info,
+                client_options=client_options,
+                mechanism={"type": "logging", "handled": True},
+            )
+        elif (record.exc_info and record.exc_info[0] is None) or record.stack_info:
+            event = {}
+            hint = {}
+            with capture_internal_exceptions():
+                event["threads"] = {
+                    "values": [
+                        {
+                            "stacktrace": current_stacktrace(
+                                include_local_variables=client_options[
+                                    "include_local_variables"
+                                ],
+                                max_value_length=client_options["max_value_length"],
+                            ),
+                            "crashed": False,
+                            "current": True,
+                        }
+                    ]
+                }
+        else:
+            event = {}
+            hint = {}
+
+        hint["log_record"] = record
+
+        level = self._logging_to_event_level(record)
+        if level in {"debug", "info", "warning", "error", "critical", "fatal"}:
+            event["level"] = level  # type: ignore[typeddict-item]
+        event["logger"] = record.name
+
+        # Log records from `warnings` module as separate issues
+        record_captured_from_warnings_module = (
+            record.name == "py.warnings" and record.msg == "%s"
+        )
+        if record_captured_from_warnings_module:
+            # use the actual message and not "%s" as the message
+            # this prevents grouping all warnings under one "%s" issue
+            msg = record.args[0]  # type: ignore
+
+            event["logentry"] = {
+                "message": msg,
+                "params": (),
+            }
+
+        else:
+            event["logentry"] = {
+                "message": to_string(record.msg),
+                "params": (
+                    tuple(str(arg) if arg is None else arg for arg in record.args)
+                    if record.args
+                    else ()
+                ),
+            }
+
+        event["extra"] = self._extra_from_record(record)
+
+        sentry_sdk.capture_event(event, hint=hint)
+
+
+# Legacy name
+SentryHandler = EventHandler
+
+
+class BreadcrumbHandler(_BaseHandler):
+    """
+    A logging handler that records breadcrumbs for each log record.
+
+    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
+    """
+
+    def emit(self, record):
+        # type: (LogRecord) -> Any
+        with capture_internal_exceptions():
+            self.format(record)
+            return self._emit(record)
+
+    def _emit(self, record):
+        # type: (LogRecord) -> None
+        if not self._can_record(record):
+            return
+
+        sentry_sdk.add_breadcrumb(
+            self._breadcrumb_from_record(record), hint={"log_record": record}
+        )
+
+    def _breadcrumb_from_record(self, record):
+        # type: (LogRecord) -> Dict[str, Any]
+        return {
+            "type": "log",
+            "level": self._logging_to_event_level(record),
+            "category": record.name,
+            "message": record.message,
+            "timestamp": datetime.fromtimestamp(record.created, timezone.utc),
+            "data": self._extra_from_record(record),
+        }
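For reference, a minimal usage sketch for the handlers defined in this file. The DSN and the logger name passed to `ignore_logger` are placeholders; the integration itself is enabled by default, so explicit configuration like this is only needed to change the thresholds or to silence a noisy logger.

```python
import logging

import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[
        # Breadcrumbs for INFO and above, events only for ERROR and above.
        LoggingIntegration(level=logging.INFO, event_level=logging.ERROR),
    ],
)

ignore_logger("noisy.third.party")  # hypothetical logger name

logging.getLogger(__name__).error("Something went wrong")  # captured as an event
```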
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/loguru.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/loguru.py
new file mode 100644
index 00000000..5b76ea81
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/loguru.py
@@ -0,0 +1,130 @@
+import enum
+
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import (
+    BreadcrumbHandler,
+    EventHandler,
+    _BaseHandler,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from logging import LogRecord
+    from typing import Optional, Tuple, Any
+
+try:
+    import loguru
+    from loguru import logger
+    from loguru._defaults import LOGURU_FORMAT as DEFAULT_FORMAT
+except ImportError:
+    raise DidNotEnable("LOGURU is not installed")
+
+
+class LoggingLevels(enum.IntEnum):
+    TRACE = 5
+    DEBUG = 10
+    INFO = 20
+    SUCCESS = 25
+    WARNING = 30
+    ERROR = 40
+    CRITICAL = 50
+
+
+SENTRY_LEVEL_FROM_LOGURU_LEVEL = {
+    "TRACE": "DEBUG",
+    "DEBUG": "DEBUG",
+    "INFO": "INFO",
+    "SUCCESS": "INFO",
+    "WARNING": "WARNING",
+    "ERROR": "ERROR",
+    "CRITICAL": "CRITICAL",
+}
+
+DEFAULT_LEVEL = LoggingLevels.INFO.value
+DEFAULT_EVENT_LEVEL = LoggingLevels.ERROR.value
+# We need to save the handlers to be able to remove them later
+# in tests (they call `LoguruIntegration.__init__` multiple times,
+# and we can't use `setup_once` because it runs before we receive
+# the configuration).
+_ADDED_HANDLERS = (None, None)  # type: Tuple[Optional[int], Optional[int]]
+
+
+class LoguruIntegration(Integration):
+    identifier = "loguru"
+
+    def __init__(
+        self,
+        level=DEFAULT_LEVEL,
+        event_level=DEFAULT_EVENT_LEVEL,
+        breadcrumb_format=DEFAULT_FORMAT,
+        event_format=DEFAULT_FORMAT,
+    ):
+        # type: (Optional[int], Optional[int], str | loguru.FormatFunction, str | loguru.FormatFunction) -> None
+        global _ADDED_HANDLERS
+        breadcrumb_handler, event_handler = _ADDED_HANDLERS
+
+        if breadcrumb_handler is not None:
+            logger.remove(breadcrumb_handler)
+            breadcrumb_handler = None
+        if event_handler is not None:
+            logger.remove(event_handler)
+            event_handler = None
+
+        if level is not None:
+            breadcrumb_handler = logger.add(
+                LoguruBreadcrumbHandler(level=level),
+                level=level,
+                format=breadcrumb_format,
+            )
+
+        if event_level is not None:
+            event_handler = logger.add(
+                LoguruEventHandler(level=event_level),
+                level=event_level,
+                format=event_format,
+            )
+
+        _ADDED_HANDLERS = (breadcrumb_handler, event_handler)
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        pass  # we do everything in __init__
+
+
+class _LoguruBaseHandler(_BaseHandler):
+    def _logging_to_event_level(self, record):
+        # type: (LogRecord) -> str
+        try:
+            return SENTRY_LEVEL_FROM_LOGURU_LEVEL[
+                LoggingLevels(record.levelno).name
+            ].lower()
+        except (ValueError, KeyError):
+            return record.levelname.lower() if record.levelname else ""
+
+
+class LoguruEventHandler(_LoguruBaseHandler, EventHandler):
+    """Modified version of :class:`sentry_sdk.integrations.logging.EventHandler` to use loguru's level names."""
+
+    def __init__(self, *args, **kwargs):
+        # type: (*Any, **Any) -> None
+        if kwargs.get("level"):
+            kwargs["level"] = SENTRY_LEVEL_FROM_LOGURU_LEVEL.get(
+                kwargs.get("level", ""), DEFAULT_LEVEL
+            )
+
+        super().__init__(*args, **kwargs)
+
+
+class LoguruBreadcrumbHandler(_LoguruBaseHandler, BreadcrumbHandler):
+    """Modified version of :class:`sentry_sdk.integrations.logging.BreadcrumbHandler` to use loguru's level names."""
+
+    def __init__(self, *args, **kwargs):
+        # type: (*Any, **Any) -> None
+        if kwargs.get("level"):
+            kwargs["level"] = SENTRY_LEVEL_FROM_LOGURU_LEVEL.get(
+                kwargs.get("level", ""), DEFAULT_LEVEL
+            )
+
+        super().__init__(*args, **kwargs)
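A minimal sketch of how the handlers above are wired up, assuming a placeholder DSN; the thresholds mirror the module defaults and are spelled out only to make the two levels explicit.

```python
import sentry_sdk
from loguru import logger
from sentry_sdk.integrations.loguru import LoggingLevels, LoguruIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[
        LoguruIntegration(
            level=LoggingLevels.INFO.value,         # breadcrumbs from INFO
            event_level=LoggingLevels.ERROR.value,  # events from ERROR
        ),
    ],
)

logger.info("recorded as a breadcrumb")
logger.error("captured as a Sentry event")
```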
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/modules.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/modules.py
new file mode 100644
index 00000000..ce3ee786
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/modules.py
@@ -0,0 +1,29 @@
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import _get_installed_modules
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from sentry_sdk._types import Event
+
+
+class ModulesIntegration(Integration):
+    identifier = "modules"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        @add_global_event_processor
+        def processor(event, hint):
+            # type: (Event, Any) -> Event
+            if event.get("type") == "transaction":
+                return event
+
+            if sentry_sdk.get_client().get_integration(ModulesIntegration) is None:
+                return event
+
+            event["modules"] = _get_installed_modules()
+            return event
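This integration is part of the default set, so the explicit registration below (with a placeholder DSN) is only needed when default integrations are disabled; it is a sketch of the effect of the event processor above.

```python
import sentry_sdk
from sentry_sdk.integrations.modules import ModulesIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    default_integrations=False,
    integrations=[ModulesIntegration()],
)

# Error events captured from here on carry a "modules" section listing the
# installed packages and their versions.
sentry_sdk.capture_exception(ValueError("example"))
```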
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/openai.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/openai.py
new file mode 100644
index 00000000..61d335b1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/openai.py
@@ -0,0 +1,429 @@
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk import consts
+from sentry_sdk.ai.monitoring import record_token_usage
+from sentry_sdk.ai.utils import set_data_normalized
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Iterable, List, Optional, Callable, AsyncIterator, Iterator
+    from sentry_sdk.tracing import Span
+
+try:
+    from openai.resources.chat.completions import Completions, AsyncCompletions
+    from openai.resources import Embeddings, AsyncEmbeddings
+
+    if TYPE_CHECKING:
+        from openai.types.chat import ChatCompletionMessageParam, ChatCompletionChunk
+except ImportError:
+    raise DidNotEnable("OpenAI not installed")
+
+
+class OpenAIIntegration(Integration):
+    identifier = "openai"
+    origin = f"auto.ai.{identifier}"
+
+    def __init__(self, include_prompts=True, tiktoken_encoding_name=None):
+        # type: (OpenAIIntegration, bool, Optional[str]) -> None
+        self.include_prompts = include_prompts
+
+        self.tiktoken_encoding = None
+        if tiktoken_encoding_name is not None:
+            import tiktoken  # type: ignore
+
+            self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name)
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        Completions.create = _wrap_chat_completion_create(Completions.create)
+        Embeddings.create = _wrap_embeddings_create(Embeddings.create)
+
+        AsyncCompletions.create = _wrap_async_chat_completion_create(
+            AsyncCompletions.create
+        )
+        AsyncEmbeddings.create = _wrap_async_embeddings_create(AsyncEmbeddings.create)
+
+    def count_tokens(self, s):
+        # type: (OpenAIIntegration, str) -> int
+        if self.tiktoken_encoding is not None:
+            return len(self.tiktoken_encoding.encode_ordinary(s))
+        return 0
+
+
+def _capture_exception(exc):
+    # type: (Any) -> None
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "openai", "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _calculate_chat_completion_usage(
+    messages, response, span, streaming_message_responses, count_tokens
+):
+    # type: (Iterable[ChatCompletionMessageParam], Any, Span, Optional[List[str]], Callable[..., Any]) -> None
+    completion_tokens = 0  # type: Optional[int]
+    prompt_tokens = 0  # type: Optional[int]
+    total_tokens = 0  # type: Optional[int]
+    if hasattr(response, "usage"):
+        if hasattr(response.usage, "completion_tokens") and isinstance(
+            response.usage.completion_tokens, int
+        ):
+            completion_tokens = response.usage.completion_tokens
+        if hasattr(response.usage, "prompt_tokens") and isinstance(
+            response.usage.prompt_tokens, int
+        ):
+            prompt_tokens = response.usage.prompt_tokens
+        if hasattr(response.usage, "total_tokens") and isinstance(
+            response.usage.total_tokens, int
+        ):
+            total_tokens = response.usage.total_tokens
+
+    if prompt_tokens == 0:
+        for message in messages:
+            if "content" in message:
+                prompt_tokens += count_tokens(message["content"])
+
+    if completion_tokens == 0:
+        if streaming_message_responses is not None:
+            for message in streaming_message_responses:
+                completion_tokens += count_tokens(message)
+        elif hasattr(response, "choices"):
+            for choice in response.choices:
+                if hasattr(choice, "message"):
+                    completion_tokens += count_tokens(choice.message)
+
+    if prompt_tokens == 0:
+        prompt_tokens = None
+    if completion_tokens == 0:
+        completion_tokens = None
+    if total_tokens == 0:
+        total_tokens = None
+    record_token_usage(span, prompt_tokens, completion_tokens, total_tokens)
+
+
+def _new_chat_completion_common(f, *args, **kwargs):
+    # type: (Any, *Any, **Any) -> Any
+    integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
+    if integration is None:
+        return f(*args, **kwargs)
+
+    if "messages" not in kwargs:
+        # invalid call (in all versions of openai), let it return error
+        return f(*args, **kwargs)
+
+    try:
+        iter(kwargs["messages"])
+    except TypeError:
+        # invalid call (in all versions), messages must be iterable
+        return f(*args, **kwargs)
+
+    kwargs["messages"] = list(kwargs["messages"])
+    messages = kwargs["messages"]
+    model = kwargs.get("model")
+    streaming = kwargs.get("stream")
+
+    span = sentry_sdk.start_span(
+        op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE,
+        name="Chat Completion",
+        origin=OpenAIIntegration.origin,
+    )
+    span.__enter__()
+
+    res = yield f, args, kwargs
+
+    with capture_internal_exceptions():
+        if should_send_default_pii() and integration.include_prompts:
+            set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, messages)
+
+        set_data_normalized(span, SPANDATA.AI_MODEL_ID, model)
+        set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)
+
+        if hasattr(res, "choices"):
+            if should_send_default_pii() and integration.include_prompts:
+                set_data_normalized(
+                    span,
+                    "ai.responses",
+                    list(map(lambda x: x.message, res.choices)),
+                )
+            _calculate_chat_completion_usage(
+                messages, res, span, None, integration.count_tokens
+            )
+            span.__exit__(None, None, None)
+        elif hasattr(res, "_iterator"):
+            data_buf: list[list[str]] = []  # one for each choice
+
+            old_iterator = res._iterator
+
+            def new_iterator():
+                # type: () -> Iterator[ChatCompletionChunk]
+                with capture_internal_exceptions():
+                    for x in old_iterator:
+                        if hasattr(x, "choices"):
+                            choice_index = 0
+                            for choice in x.choices:
+                                if hasattr(choice, "delta") and hasattr(
+                                    choice.delta, "content"
+                                ):
+                                    content = choice.delta.content
+                                    if len(data_buf) <= choice_index:
+                                        data_buf.append([])
+                                    data_buf[choice_index].append(content or "")
+                                choice_index += 1
+                        yield x
+                    if len(data_buf) > 0:
+                        all_responses = list(
+                            map(lambda chunk: "".join(chunk), data_buf)
+                        )
+                        if should_send_default_pii() and integration.include_prompts:
+                            set_data_normalized(
+                                span, SPANDATA.AI_RESPONSES, all_responses
+                            )
+                        _calculate_chat_completion_usage(
+                            messages,
+                            res,
+                            span,
+                            all_responses,
+                            integration.count_tokens,
+                        )
+                span.__exit__(None, None, None)
+
+            async def new_iterator_async():
+                # type: () -> AsyncIterator[ChatCompletionChunk]
+                with capture_internal_exceptions():
+                    async for x in old_iterator:
+                        if hasattr(x, "choices"):
+                            choice_index = 0
+                            for choice in x.choices:
+                                if hasattr(choice, "delta") and hasattr(
+                                    choice.delta, "content"
+                                ):
+                                    content = choice.delta.content
+                                    if len(data_buf) <= choice_index:
+                                        data_buf.append([])
+                                    data_buf[choice_index].append(content or "")
+                                choice_index += 1
+                        yield x
+                    if len(data_buf) > 0:
+                        all_responses = list(
+                            map(lambda chunk: "".join(chunk), data_buf)
+                        )
+                        if should_send_default_pii() and integration.include_prompts:
+                            set_data_normalized(
+                                span, SPANDATA.AI_RESPONSES, all_responses
+                            )
+                        _calculate_chat_completion_usage(
+                            messages,
+                            res,
+                            span,
+                            all_responses,
+                            integration.count_tokens,
+                        )
+                span.__exit__(None, None, None)
+
+            if str(type(res._iterator)) == "<class 'async_generator'>":
+                res._iterator = new_iterator_async()
+            else:
+                res._iterator = new_iterator()
+
+        else:
+            set_data_normalized(span, "unknown_response", True)
+            span.__exit__(None, None, None)
+    return res
+
+
+def _wrap_chat_completion_create(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
+    def _execute_sync(f, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        gen = _new_chat_completion_common(f, *args, **kwargs)
+
+        try:
+            f, args, kwargs = next(gen)
+        except StopIteration as e:
+            return e.value
+
+        try:
+            try:
+                result = f(*args, **kwargs)
+            except Exception as e:
+                _capture_exception(e)
+                raise e from None
+
+            return gen.send(result)
+        except StopIteration as e:
+            return e.value
+
+    @wraps(f)
+    def _sentry_patched_create_sync(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
+        if integration is None or "messages" not in kwargs:
+            # no "messages" means invalid call (in all versions of openai), let it return error
+            return f(*args, **kwargs)
+
+        return _execute_sync(f, *args, **kwargs)
+
+    return _sentry_patched_create_sync
+
+
+def _wrap_async_chat_completion_create(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
+    async def _execute_async(f, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        gen = _new_chat_completion_common(f, *args, **kwargs)
+
+        try:
+            f, args, kwargs = next(gen)
+        except StopIteration as e:
+            return await e.value
+
+        try:
+            try:
+                result = await f(*args, **kwargs)
+            except Exception as e:
+                _capture_exception(e)
+                raise e from None
+
+            return gen.send(result)
+        except StopIteration as e:
+            return e.value
+
+    @wraps(f)
+    async def _sentry_patched_create_async(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
+        if integration is None or "messages" not in kwargs:
+            # no "messages" means invalid call (in all versions of openai), let it return error
+            return await f(*args, **kwargs)
+
+        return await _execute_async(f, *args, **kwargs)
+
+    return _sentry_patched_create_async
+
+
+def _new_embeddings_create_common(f, *args, **kwargs):
+    # type: (Any, *Any, **Any) -> Any
+    integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
+    if integration is None:
+        return f(*args, **kwargs)
+
+    with sentry_sdk.start_span(
+        op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
+        description="OpenAI Embedding Creation",
+        origin=OpenAIIntegration.origin,
+    ) as span:
+        if "input" in kwargs and (
+            should_send_default_pii() and integration.include_prompts
+        ):
+            if isinstance(kwargs["input"], str):
+                set_data_normalized(span, "ai.input_messages", [kwargs["input"]])
+            elif (
+                isinstance(kwargs["input"], list)
+                and len(kwargs["input"]) > 0
+                and isinstance(kwargs["input"][0], str)
+            ):
+                set_data_normalized(span, "ai.input_messages", kwargs["input"])
+        if "model" in kwargs:
+            set_data_normalized(span, "ai.model_id", kwargs["model"])
+
+        response = yield f, args, kwargs
+
+        prompt_tokens = 0
+        total_tokens = 0
+        if hasattr(response, "usage"):
+            if hasattr(response.usage, "prompt_tokens") and isinstance(
+                response.usage.prompt_tokens, int
+            ):
+                prompt_tokens = response.usage.prompt_tokens
+            if hasattr(response.usage, "total_tokens") and isinstance(
+                response.usage.total_tokens, int
+            ):
+                total_tokens = response.usage.total_tokens
+
+        if prompt_tokens == 0:
+            prompt_tokens = integration.count_tokens(kwargs["input"] or "")
+
+        record_token_usage(span, prompt_tokens, None, total_tokens or prompt_tokens)
+
+        return response
+
+
+def _wrap_embeddings_create(f):
+    # type: (Any) -> Any
+    def _execute_sync(f, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        gen = _new_embeddings_create_common(f, *args, **kwargs)
+
+        try:
+            f, args, kwargs = next(gen)
+        except StopIteration as e:
+            return e.value
+
+        try:
+            try:
+                result = f(*args, **kwargs)
+            except Exception as e:
+                _capture_exception(e)
+                raise e from None
+
+            return gen.send(result)
+        except StopIteration as e:
+            return e.value
+
+    @wraps(f)
+    def _sentry_patched_create_sync(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
+        if integration is None:
+            return f(*args, **kwargs)
+
+        return _execute_sync(f, *args, **kwargs)
+
+    return _sentry_patched_create_sync
+
+
+def _wrap_async_embeddings_create(f):
+    # type: (Any) -> Any
+    async def _execute_async(f, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        gen = _new_embeddings_create_common(f, *args, **kwargs)
+
+        try:
+            f, args, kwargs = next(gen)
+        except StopIteration as e:
+            return await e.value
+
+        try:
+            try:
+                result = await f(*args, **kwargs)
+            except Exception as e:
+                _capture_exception(e)
+                raise e from None
+
+            return gen.send(result)
+        except StopIteration as e:
+            return e.value
+
+    @wraps(f)
+    async def _sentry_patched_create_async(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
+        if integration is None:
+            return await f(*args, **kwargs)
+
+        return await _execute_async(f, *args, **kwargs)
+
+    return _sentry_patched_create_async
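A configuration sketch for the wrappers above; the DSN is a placeholder. `send_default_pii=True` together with `include_prompts=True` is what allows messages and responses to be attached, and `tiktoken_encoding_name` (here the common `"cl100k_base"` encoding) is optional and requires `tiktoken` so that `count_tokens()` can estimate usage for streaming responses.

```python
import sentry_sdk
from sentry_sdk.integrations.openai import OpenAIIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    send_default_pii=True,  # required for prompts/responses to be recorded
    integrations=[
        OpenAIIntegration(
            include_prompts=True,
            # Optional; requires tiktoken and lets count_tokens() estimate
            # usage when the API response carries no usage data (streaming).
            tiktoken_encoding_name="cl100k_base",
        ),
    ],
)
```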
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/openfeature.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/openfeature.py
new file mode 100644
index 00000000..bf66b94e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/openfeature.py
@@ -0,0 +1,39 @@
+from typing import TYPE_CHECKING
+import sentry_sdk
+
+from sentry_sdk.integrations import DidNotEnable, Integration
+
+try:
+    from openfeature import api
+    from openfeature.hook import Hook
+
+    if TYPE_CHECKING:
+        from openfeature.flag_evaluation import FlagEvaluationDetails
+        from openfeature.hook import HookContext, HookHints
+except ImportError:
+    raise DidNotEnable("OpenFeature is not installed")
+
+
+class OpenFeatureIntegration(Integration):
+    identifier = "openfeature"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        # Register the hook within the global openfeature hooks list.
+        api.add_hooks(hooks=[OpenFeatureHook()])
+
+
+class OpenFeatureHook(Hook):
+
+    def after(self, hook_context, details, hints):
+        # type: (HookContext, FlagEvaluationDetails[bool], HookHints) -> None
+        if isinstance(details.value, bool):
+            flags = sentry_sdk.get_current_scope().flags
+            flags.set(details.flag_key, details.value)
+
+    def error(self, hook_context, exception, hints):
+        # type: (HookContext, Exception, HookHints) -> None
+        if isinstance(hook_context.default_value, bool):
+            flags = sentry_sdk.get_current_scope().flags
+            flags.set(hook_context.flag_key, hook_context.default_value)
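A small sketch of how the hook above gets exercised; the DSN and the flag key are placeholders, and a real deployment would register a provider via `api.set_provider(...)` before evaluating flags.

```python
import sentry_sdk
from openfeature import api
from sentry_sdk.integrations.openfeature import OpenFeatureIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[OpenFeatureIntegration()],
)

# A real provider would normally be registered first, e.g. api.set_provider(...).
client = api.get_client()

# Boolean evaluations pass through OpenFeatureHook.after() and are recorded
# on the current Sentry scope, so they show up on subsequent error events.
enabled = client.get_boolean_value("my-flag", False)  # "my-flag" is a placeholder
```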
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/__init__.py
new file mode 100644
index 00000000..3c4c1a68
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/__init__.py
@@ -0,0 +1,7 @@
+from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
+
+__all__ = [
+    "SentryPropagator",
+    "SentrySpanProcessor",
+]
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/consts.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/consts.py
new file mode 100644
index 00000000..ec493449
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/consts.py
@@ -0,0 +1,5 @@
+from opentelemetry.context import create_key
+
+
+SENTRY_TRACE_KEY = create_key("sentry-trace")
+SENTRY_BAGGAGE_KEY = create_key("sentry-baggage")
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/integration.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/integration.py
new file mode 100644
index 00000000..43e0396c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/integration.py
@@ -0,0 +1,58 @@
+"""
+IMPORTANT: The contents of this file are part of a proof of concept and as such
+are experimental and not suitable for production use. They may be changed or
+removed at any time without prior notice.
+"""
+
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
+from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.utils import logger
+
+try:
+    from opentelemetry import trace
+    from opentelemetry.propagate import set_global_textmap
+    from opentelemetry.sdk.trace import TracerProvider
+except ImportError:
+    raise DidNotEnable("opentelemetry not installed")
+
+try:
+    from opentelemetry.instrumentation.django import DjangoInstrumentor  # type: ignore[import-not-found]
+except ImportError:
+    DjangoInstrumentor = None
+
+
+CONFIGURABLE_INSTRUMENTATIONS = {
+    DjangoInstrumentor: {"is_sql_commentor_enabled": True},
+}
+
+
+class OpenTelemetryIntegration(Integration):
+    identifier = "opentelemetry"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        logger.warning(
+            "[OTel] Initializing highly experimental OpenTelemetry support. "
+            "Use at your own risk."
+        )
+
+        _setup_sentry_tracing()
+        # _setup_instrumentors()
+
+        logger.debug("[OTel] Finished setting up OpenTelemetry integration")
+
+
+def _setup_sentry_tracing():
+    # type: () -> None
+    provider = TracerProvider()
+    provider.add_span_processor(SentrySpanProcessor())
+    trace.set_tracer_provider(provider)
+    set_global_textmap(SentryPropagator())
+
+
+def _setup_instrumentors():
+    # type: () -> None
+    for instrumentor, kwargs in CONFIGURABLE_INSTRUMENTATIONS.items():
+        instrumentor().instrument(**kwargs)
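Given the experimental warning above, a sketch of how this integration is typically switched on (placeholder DSN); the span processor it installs only acts when the SDK is initialized with the "otel" instrumenter.

```python
import sentry_sdk
from sentry_sdk.integrations.opentelemetry.integration import OpenTelemetryIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    # Hand span creation over to OpenTelemetry; SentrySpanProcessor only
    # processes spans when the "otel" instrumenter is active.
    instrumenter="otel",
    integrations=[OpenTelemetryIntegration()],
)
```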
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/propagator.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/propagator.py
new file mode 100644
index 00000000..b84d582d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/propagator.py
@@ -0,0 +1,117 @@
+from opentelemetry import trace
+from opentelemetry.context import (
+    Context,
+    get_current,
+    set_value,
+)
+from opentelemetry.propagators.textmap import (
+    CarrierT,
+    Getter,
+    Setter,
+    TextMapPropagator,
+    default_getter,
+    default_setter,
+)
+from opentelemetry.trace import (
+    NonRecordingSpan,
+    SpanContext,
+    TraceFlags,
+)
+
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+)
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+)
+from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional, Set
+
+
+class SentryPropagator(TextMapPropagator):
+    """
+    Propagates tracing headers for Sentry's tracing system in a way OTel understands.
+    """
+
+    def extract(self, carrier, context=None, getter=default_getter):
+        # type: (CarrierT, Optional[Context], Getter[CarrierT]) -> Context
+        if context is None:
+            context = get_current()
+
+        sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME)
+        if not sentry_trace:
+            return context
+
+        sentrytrace = extract_sentrytrace_data(sentry_trace[0])
+        if not sentrytrace:
+            return context
+
+        context = set_value(SENTRY_TRACE_KEY, sentrytrace, context)
+
+        trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"]
+
+        span_context = SpanContext(
+            trace_id=int(trace_id, 16),  # type: ignore
+            span_id=int(span_id, 16),  # type: ignore
+            # we simulate a sampled trace on the otel side and leave the sampling to sentry
+            trace_flags=TraceFlags(TraceFlags.SAMPLED),
+            is_remote=True,
+        )
+
+        baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME)
+
+        if baggage_header:
+            baggage = Baggage.from_incoming_header(baggage_header[0])
+        else:
+            # If there's an incoming sentry-trace but no incoming baggage header,
+            # for instance in traces coming from older SDKs,
+            # baggage will be empty and frozen and won't be populated as head SDK.
+            baggage = Baggage(sentry_items={})
+
+        baggage.freeze()
+        context = set_value(SENTRY_BAGGAGE_KEY, baggage, context)
+
+        span = NonRecordingSpan(span_context)
+        modified_context = trace.set_span_in_context(span, context)
+        return modified_context
+
+    def inject(self, carrier, context=None, setter=default_setter):
+        # type: (CarrierT, Optional[Context], Setter[CarrierT]) -> None
+        if context is None:
+            context = get_current()
+
+        current_span = trace.get_current_span(context)
+        current_span_context = current_span.get_span_context()
+
+        if not current_span_context.is_valid:
+            return
+
+        span_id = trace.format_span_id(current_span_context.span_id)
+
+        span_map = SentrySpanProcessor().otel_span_map
+        sentry_span = span_map.get(span_id, None)
+        if not sentry_span:
+            return
+
+        setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent())
+
+        if sentry_span.containing_transaction:
+            baggage = sentry_span.containing_transaction.get_baggage()
+            if baggage:
+                baggage_data = baggage.serialize()
+                if baggage_data:
+                    setter.set(carrier, BAGGAGE_HEADER_NAME, baggage_data)
+
+    @property
+    def fields(self):
+        # type: () -> Set[str]
+        return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME}
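A sketch of the propagator on the outgoing side, assuming `set_global_textmap(SentryPropagator())` has already run (for example via the integration above) and an OTel span that maps to a Sentry span is currently active.

```python
from opentelemetry.propagate import inject

headers = {}  # type: dict[str, str]
inject(headers)  # delegates to SentryPropagator.inject() via the global textmap

# headers now contains "sentry-trace" (and "baggage" when available), ready to
# be attached to an outgoing HTTP request.
```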
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/span_processor.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/span_processor.py
new file mode 100644
index 00000000..e00562a5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -0,0 +1,391 @@
+from datetime import datetime, timezone
+from time import time
+from typing import TYPE_CHECKING, cast
+
+from opentelemetry.context import get_value
+from opentelemetry.sdk.trace import SpanProcessor, ReadableSpan as OTelSpan
+from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.trace import (
+    format_span_id,
+    format_trace_id,
+    get_current_span,
+    SpanKind,
+)
+from opentelemetry.trace.span import (
+    INVALID_SPAN_ID,
+    INVALID_TRACE_ID,
+)
+from sentry_sdk import get_client, start_transaction
+from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.tracing import Transaction, Span as SentrySpan
+from sentry_sdk.utils import Dsn
+
+from urllib3.util import parse_url as urlparse
+
+if TYPE_CHECKING:
+    from typing import Any, Optional, Union
+    from opentelemetry import context as context_api
+    from sentry_sdk._types import Event, Hint
+
+OPEN_TELEMETRY_CONTEXT = "otel"
+SPAN_MAX_TIME_OPEN_MINUTES = 10
+SPAN_ORIGIN = "auto.otel"
+
+
+def link_trace_context_to_error_event(event, otel_span_map):
+    # type: (Event, dict[str, Union[Transaction, SentrySpan]]) -> Event
+    client = get_client()
+
+    if client.options["instrumenter"] != INSTRUMENTER.OTEL:
+        return event
+
+    if hasattr(event, "type") and event["type"] == "transaction":
+        return event
+
+    otel_span = get_current_span()
+    if not otel_span:
+        return event
+
+    ctx = otel_span.get_span_context()
+
+    if ctx.trace_id == INVALID_TRACE_ID or ctx.span_id == INVALID_SPAN_ID:
+        return event
+
+    sentry_span = otel_span_map.get(format_span_id(ctx.span_id), None)
+    if not sentry_span:
+        return event
+
+    contexts = event.setdefault("contexts", {})
+    contexts.setdefault("trace", {}).update(sentry_span.get_trace_context())
+
+    return event
+
+
+class SentrySpanProcessor(SpanProcessor):
+    """
+    Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
+    """
+
+    # The mapping from otel span ids to sentry spans
+    otel_span_map = {}  # type: dict[str, Union[Transaction, SentrySpan]]
+
+    # The currently open spans. Elements will be discarded after SPAN_MAX_TIME_OPEN_MINUTES
+    open_spans = {}  # type: dict[int, set[str]]
+
+    def __new__(cls):
+        # type: () -> SentrySpanProcessor
+        if not hasattr(cls, "instance"):
+            cls.instance = super().__new__(cls)
+
+        return cls.instance
+
+    def __init__(self):
+        # type: () -> None
+        @add_global_event_processor
+        def global_event_processor(event, hint):
+            # type: (Event, Hint) -> Event
+            return link_trace_context_to_error_event(event, self.otel_span_map)
+
+    def _prune_old_spans(self):
+        # type: (SentrySpanProcessor) -> None
+        """
+        Prune spans that have been open for too long.
+        """
+        current_time_minutes = int(time() / 60)
+        for span_start_minutes in list(
+            self.open_spans.keys()
+        ):  # making a list because we change the dict
+            # prune empty open spans buckets
+            if self.open_spans[span_start_minutes] == set():
+                self.open_spans.pop(span_start_minutes)
+
+            # prune old buckets
+            elif current_time_minutes - span_start_minutes > SPAN_MAX_TIME_OPEN_MINUTES:
+                for span_id in self.open_spans.pop(span_start_minutes):
+                    self.otel_span_map.pop(span_id, None)
+
+    def on_start(self, otel_span, parent_context=None):
+        # type: (OTelSpan, Optional[context_api.Context]) -> None
+        client = get_client()
+
+        if not client.dsn:
+            return
+
+        try:
+            _ = Dsn(client.dsn)
+        except Exception:
+            return
+
+        if client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        if not otel_span.get_span_context().is_valid:
+            return
+
+        if self._is_sentry_span(otel_span):
+            return
+
+        trace_data = self._get_trace_data(otel_span, parent_context)
+
+        parent_span_id = trace_data["parent_span_id"]
+        sentry_parent_span = (
+            self.otel_span_map.get(parent_span_id) if parent_span_id else None
+        )
+
+        start_timestamp = None
+        if otel_span.start_time is not None:
+            start_timestamp = datetime.fromtimestamp(
+                otel_span.start_time / 1e9, timezone.utc
+            )  # OTel spans have nanosecond precision
+
+        sentry_span = None
+        if sentry_parent_span:
+            sentry_span = sentry_parent_span.start_child(
+                span_id=trace_data["span_id"],
+                name=otel_span.name,
+                start_timestamp=start_timestamp,
+                instrumenter=INSTRUMENTER.OTEL,
+                origin=SPAN_ORIGIN,
+            )
+        else:
+            sentry_span = start_transaction(
+                name=otel_span.name,
+                span_id=trace_data["span_id"],
+                parent_span_id=parent_span_id,
+                trace_id=trace_data["trace_id"],
+                baggage=trace_data["baggage"],
+                start_timestamp=start_timestamp,
+                instrumenter=INSTRUMENTER.OTEL,
+                origin=SPAN_ORIGIN,
+            )
+
+        self.otel_span_map[trace_data["span_id"]] = sentry_span
+
+        if otel_span.start_time is not None:
+            span_start_in_minutes = int(
+                otel_span.start_time / 1e9 / 60
+            )  # OTel spans have nanosecond precision
+            self.open_spans.setdefault(span_start_in_minutes, set()).add(
+                trace_data["span_id"]
+            )
+
+        self._prune_old_spans()
+
+    def on_end(self, otel_span):
+        # type: (OTelSpan) -> None
+        client = get_client()
+
+        if client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        span_context = otel_span.get_span_context()
+        if not span_context.is_valid:
+            return
+
+        span_id = format_span_id(span_context.span_id)
+        sentry_span = self.otel_span_map.pop(span_id, None)
+        if not sentry_span:
+            return
+
+        sentry_span.op = otel_span.name
+
+        self._update_span_with_otel_status(sentry_span, otel_span)
+
+        if isinstance(sentry_span, Transaction):
+            sentry_span.name = otel_span.name
+            sentry_span.set_context(
+                OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span)
+            )
+            self._update_transaction_with_otel_data(sentry_span, otel_span)
+
+        else:
+            self._update_span_with_otel_data(sentry_span, otel_span)
+
+        end_timestamp = None
+        if otel_span.end_time is not None:
+            end_timestamp = datetime.fromtimestamp(
+                otel_span.end_time / 1e9, timezone.utc
+            )  # OTel spans have nanosecond precision
+
+        sentry_span.finish(end_timestamp=end_timestamp)
+
+        if otel_span.start_time is not None:
+            span_start_in_minutes = int(
+                otel_span.start_time / 1e9 / 60
+            )  # OTel spans have nanosecond precision
+            self.open_spans.setdefault(span_start_in_minutes, set()).discard(span_id)
+
+        self._prune_old_spans()
+
+    def _is_sentry_span(self, otel_span):
+        # type: (OTelSpan) -> bool
+        """
+        Break an infinite loop:
+        HTTP requests to Sentry are caught by OTel and sent to Sentry again.
+        """
+        otel_span_url = None
+        if otel_span.attributes is not None:
+            otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL)
+        otel_span_url = cast("Optional[str]", otel_span_url)
+
+        dsn_url = None
+        client = get_client()
+        if client.dsn:
+            dsn_url = Dsn(client.dsn).netloc
+
+        if otel_span_url and dsn_url and dsn_url in otel_span_url:
+            return True
+
+        return False
+
+    def _get_otel_context(self, otel_span):
+        # type: (OTelSpan) -> dict[str, Any]
+        """
+        Returns the OTel context for Sentry.
+        See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context
+        """
+        ctx = {}
+
+        if otel_span.attributes:
+            ctx["attributes"] = dict(otel_span.attributes)
+
+        if otel_span.resource.attributes:
+            ctx["resource"] = dict(otel_span.resource.attributes)
+
+        return ctx
+
+    def _get_trace_data(self, otel_span, parent_context):
+        # type: (OTelSpan, Optional[context_api.Context]) -> dict[str, Any]
+        """
+        Extracts tracing information from one OTel span and its parent OTel context.
+        """
+        trace_data = {}  # type: dict[str, Any]
+        span_context = otel_span.get_span_context()
+
+        span_id = format_span_id(span_context.span_id)
+        trace_data["span_id"] = span_id
+
+        trace_id = format_trace_id(span_context.trace_id)
+        trace_data["trace_id"] = trace_id
+
+        parent_span_id = (
+            format_span_id(otel_span.parent.span_id) if otel_span.parent else None
+        )
+        trace_data["parent_span_id"] = parent_span_id
+
+        sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)
+        sentry_trace_data = cast("dict[str, Union[str, bool, None]]", sentry_trace_data)
+        trace_data["parent_sampled"] = (
+            sentry_trace_data["parent_sampled"] if sentry_trace_data else None
+        )
+
+        baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)
+        trace_data["baggage"] = baggage
+
+        return trace_data
+
+    def _update_span_with_otel_status(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        """
+        Set the Sentry span status from the OTel span
+        """
+        if otel_span.status.is_unset:
+            return
+
+        if otel_span.status.is_ok:
+            sentry_span.set_status(SPANSTATUS.OK)
+            return
+
+        sentry_span.set_status(SPANSTATUS.INTERNAL_ERROR)
+
+    def _update_span_with_otel_data(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        """
+        Convert OTel span data and update the Sentry span with it.
+        This should eventually happen on the server when ingesting the spans.
+        """
+        sentry_span.set_data("otel.kind", otel_span.kind)
+
+        op = otel_span.name
+        description = otel_span.name
+
+        if otel_span.attributes is not None:
+            for key, val in otel_span.attributes.items():
+                sentry_span.set_data(key, val)
+
+            http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD)
+            http_method = cast("Optional[str]", http_method)
+
+            db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM)
+
+            if http_method:
+                op = "http"
+
+                if otel_span.kind == SpanKind.SERVER:
+                    op += ".server"
+                elif otel_span.kind == SpanKind.CLIENT:
+                    op += ".client"
+
+                description = http_method
+
+                peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None)
+                if peer_name:
+                    description += " {}".format(peer_name)
+
+                target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None)
+                if target:
+                    description += " {}".format(target)
+
+                if not peer_name and not target:
+                    url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+                    url = cast("Optional[str]", url)
+                    if url:
+                        parsed_url = urlparse(url)
+                        url = "{}://{}{}".format(
+                            parsed_url.scheme, parsed_url.netloc, parsed_url.path
+                        )
+                        description += " {}".format(url)
+
+                status_code = otel_span.attributes.get(
+                    SpanAttributes.HTTP_STATUS_CODE, None
+                )
+                status_code = cast("Optional[int]", status_code)
+                if status_code:
+                    sentry_span.set_http_status(status_code)
+
+            elif db_query:
+                op = "db"
+                statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None)
+                statement = cast("Optional[str]", statement)
+                if statement:
+                    description = statement
+
+        sentry_span.op = op
+        sentry_span.description = description
+
+    def _update_transaction_with_otel_data(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        if otel_span.attributes is None:
+            return
+
+        http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD)
+
+        if http_method:
+            status_code = otel_span.attributes.get(SpanAttributes.HTTP_STATUS_CODE)
+            status_code = cast("Optional[int]", status_code)
+            if status_code:
+                sentry_span.set_http_status(status_code)
+
+            op = "http"
+
+            if otel_span.kind == SpanKind.SERVER:
+                op += ".server"
+            elif otel_span.kind == SpanKind.CLIENT:
+                op += ".client"
+
+            sentry_span.op = op
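For context, a sketch of the span processor from the application side: spans created through the regular OTel tracer API are mirrored into Sentry transactions and child spans by `on_start()`/`on_end()` above, assuming the tracer provider was configured with `SentrySpanProcessor()` and the SDK runs with the "otel" instrumenter. The span name and attribute below are placeholders.

```python
from opentelemetry import trace

tracer = trace.get_tracer(__name__)

with tracer.start_as_current_span("process-order") as span:  # placeholder name
    span.set_attribute("order.id", "1234")  # placeholder attribute
    # on_end() converts the finished OTel span into a Sentry transaction
    # (or a child span if a mapped parent exists).
```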
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pure_eval.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pure_eval.py
new file mode 100644
index 00000000..c1c3d638
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pure_eval.py
@@ -0,0 +1,139 @@
+import ast
+
+import sentry_sdk
+from sentry_sdk import serializer
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import walk_exception_chain, iter_stacks
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional, Dict, Any, Tuple, List
+    from types import FrameType
+
+    from sentry_sdk._types import Event, Hint
+
+try:
+    import executing
+except ImportError:
+    raise DidNotEnable("executing is not installed")
+
+try:
+    import pure_eval
+except ImportError:
+    raise DidNotEnable("pure_eval is not installed")
+
+try:
+    # Used implicitly, just testing it's available
+    import asttokens  # noqa
+except ImportError:
+    raise DidNotEnable("asttokens is not installed")
+
+
+class PureEvalIntegration(Integration):
+    identifier = "pure_eval"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        @add_global_event_processor
+        def add_executing_info(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if sentry_sdk.get_client().get_integration(PureEvalIntegration) is None:
+                return event
+
+            if hint is None:
+                return event
+
+            exc_info = hint.get("exc_info", None)
+
+            if exc_info is None:
+                return event
+
+            exception = event.get("exception", None)
+
+            if exception is None:
+                return event
+
+            values = exception.get("values", None)
+
+            if values is None:
+                return event
+
+            for exception, (_exc_type, _exc_value, exc_tb) in zip(
+                reversed(values), walk_exception_chain(exc_info)
+            ):
+                sentry_frames = [
+                    frame
+                    for frame in exception.get("stacktrace", {}).get("frames", [])
+                    if frame.get("function")
+                ]
+                tbs = list(iter_stacks(exc_tb))
+                if len(sentry_frames) != len(tbs):
+                    continue
+
+                for sentry_frame, tb in zip(sentry_frames, tbs):
+                    sentry_frame["vars"] = (
+                        pure_eval_frame(tb.tb_frame) or sentry_frame["vars"]
+                    )
+            return event
+
+
+def pure_eval_frame(frame):
+    # type: (FrameType) -> Dict[str, Any]
+    source = executing.Source.for_frame(frame)
+    if not source.tree:
+        return {}
+
+    statements = source.statements_at_line(frame.f_lineno)
+    if not statements:
+        return {}
+
+    scope = stmt = list(statements)[0]
+    while True:
+        # Get the parent first in case the original statement is already
+        # a function definition, e.g. if we're calling a decorator
+        # In that case we still want the surrounding scope, not that function
+        scope = scope.parent
+        if isinstance(scope, (ast.FunctionDef, ast.ClassDef, ast.Module)):
+            break
+
+    evaluator = pure_eval.Evaluator.from_frame(frame)
+    expressions = evaluator.interesting_expressions_grouped(scope)
+
+    def closeness(expression):
+        # type: (Tuple[List[Any], Any]) -> Tuple[int, int]
+        # Prioritise expressions with a node closer to the statement executed
+        # without being after that statement
+        # A higher return value is better - the expression will appear
+        # earlier in the list of values and is less likely to be trimmed
+        nodes, _value = expression
+
+        def start(n):
+            # type: (ast.expr) -> Tuple[int, int]
+            return (n.lineno, n.col_offset)
+
+        nodes_before_stmt = [
+            node for node in nodes if start(node) < stmt.last_token.end  # type: ignore
+        ]
+        if nodes_before_stmt:
+            # The position of the last node before or in the statement
+            return max(start(node) for node in nodes_before_stmt)
+        else:
+            # The position of the first node after the statement
+            # Negative means it's always lower priority than nodes that come before
+            # Less negative means closer to the statement and higher priority
+            lineno, col_offset = min(start(node) for node in nodes)
+            return (-lineno, -col_offset)
+
+    # This adds the first_token and last_token attributes to nodes
+    atok = source.asttokens()
+
+    expressions.sort(key=closeness, reverse=True)
+    vars = {
+        atok.get_text(nodes[0]): value
+        for nodes, value in expressions[: serializer.MAX_DATABAG_BREADTH]
+    }
+    return serializer.serialize(vars, is_vars=True)
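A minimal enablement sketch (not part of the diff above): the DSN is a placeholder and the module path follows this file's location; `pure_eval`, `executing` and `asttokens` must be installed or setup raises `DidNotEnable`.

import sentry_sdk
from sentry_sdk.integrations.pure_eval import PureEvalIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[PureEvalIntegration()],
)

# Exceptions captured from here on get their frame "vars" enriched by pure_eval
# through the global event processor registered in setup_once().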
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pymongo.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pymongo.py
new file mode 100644
index 00000000..f65ad736
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pymongo.py
@@ -0,0 +1,214 @@
+import copy
+import json
+
+import sentry_sdk
+from sentry_sdk.consts import SPANSTATUS, SPANDATA, OP
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions
+
+try:
+    from pymongo import monitoring
+except ImportError:
+    raise DidNotEnable("Pymongo not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Union
+
+    from pymongo.monitoring import (
+        CommandFailedEvent,
+        CommandStartedEvent,
+        CommandSucceededEvent,
+    )
+
+
+SAFE_COMMAND_ATTRIBUTES = [
+    "insert",
+    "ordered",
+    "find",
+    "limit",
+    "singleBatch",
+    "aggregate",
+    "createIndexes",
+    "indexes",
+    "delete",
+    "findAndModify",
+    "renameCollection",
+    "to",
+    "drop",
+]
+
+
+def _strip_pii(command):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    for key in command:
+        is_safe_field = key in SAFE_COMMAND_ATTRIBUTES
+        if is_safe_field:
+            # Skip if safe key
+            continue
+
+        update_db_command = key == "update" and "findAndModify" not in command
+        if update_db_command:
+            # Also skip the "update" db command because it is safe.
+            # There is also an "update" key in the "findAndModify" command, which is NOT safe!
+            continue
+
+        # Special stripping for documents
+        is_document = key == "documents"
+        if is_document:
+            for doc in command[key]:
+                for doc_key in doc:
+                    doc[doc_key] = "%s"
+            continue
+
+        # Special stripping for dict style fields
+        is_dict_field = key in ["filter", "query", "update"]
+        if is_dict_field:
+            for item_key in command[key]:
+                command[key][item_key] = "%s"
+            continue
+
+        # For pipeline fields strip the `$match` dict
+        is_pipeline_field = key == "pipeline"
+        if is_pipeline_field:
+            for pipeline in command[key]:
+                for match_key in pipeline["$match"] if "$match" in pipeline else []:
+                    pipeline["$match"][match_key] = "%s"
+            continue
+
+        # Default stripping
+        command[key] = "%s"
+
+    return command
+
+
+def _get_db_data(event):
+    # type: (Any) -> Dict[str, Any]
+    data = {}
+
+    data[SPANDATA.DB_SYSTEM] = "mongodb"
+
+    db_name = event.database_name
+    if db_name is not None:
+        data[SPANDATA.DB_NAME] = db_name
+
+    server_address = event.connection_id[0]
+    if server_address is not None:
+        data[SPANDATA.SERVER_ADDRESS] = server_address
+
+    server_port = event.connection_id[1]
+    if server_port is not None:
+        data[SPANDATA.SERVER_PORT] = server_port
+
+    return data
+
+
+class CommandTracer(monitoring.CommandListener):
+    def __init__(self):
+        # type: () -> None
+        self._ongoing_operations = {}  # type: Dict[int, Span]
+
+    def _operation_key(self, event):
+        # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int
+        return event.request_id
+
+    def started(self, event):
+        # type: (CommandStartedEvent) -> None
+        if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None:
+            return
+
+        with capture_internal_exceptions():
+            command = dict(copy.deepcopy(event.command))
+
+            command.pop("$db", None)
+            command.pop("$clusterTime", None)
+            command.pop("$signature", None)
+
+            tags = {
+                "db.name": event.database_name,
+                SPANDATA.DB_SYSTEM: "mongodb",
+                SPANDATA.DB_OPERATION: event.command_name,
+                SPANDATA.DB_MONGODB_COLLECTION: command.get(event.command_name),
+            }
+
+            try:
+                tags["net.peer.name"] = event.connection_id[0]
+                tags["net.peer.port"] = str(event.connection_id[1])
+            except TypeError:
+                pass
+
+            data = {"operation_ids": {}}  # type: Dict[str, Any]
+            data["operation_ids"]["operation"] = event.operation_id
+            data["operation_ids"]["request"] = event.request_id
+
+            data.update(_get_db_data(event))
+
+            try:
+                lsid = command.pop("lsid")["id"]
+                data["operation_ids"]["session"] = str(lsid)
+            except KeyError:
+                pass
+
+            if not should_send_default_pii():
+                command = _strip_pii(command)
+
+            query = json.dumps(command, default=str)
+            span = sentry_sdk.start_span(
+                op=OP.DB,
+                name=query,
+                origin=PyMongoIntegration.origin,
+            )
+
+            for tag, value in tags.items():
+                # set the tag for backwards-compatibility.
+                # TODO: remove the set_tag call in the next major release!
+                span.set_tag(tag, value)
+
+                span.set_data(tag, value)
+
+            for key, value in data.items():
+                span.set_data(key, value)
+
+            with capture_internal_exceptions():
+                sentry_sdk.add_breadcrumb(
+                    message=query, category="query", type=OP.DB, data=tags
+                )
+
+            self._ongoing_operations[self._operation_key(event)] = span.__enter__()
+
+    def failed(self, event):
+        # type: (CommandFailedEvent) -> None
+        if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status(SPANSTATUS.INTERNAL_ERROR)
+            span.__exit__(None, None, None)
+        except KeyError:
+            return
+
+    def succeeded(self, event):
+        # type: (CommandSucceededEvent) -> None
+        if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status(SPANSTATUS.OK)
+            span.__exit__(None, None, None)
+        except KeyError:
+            pass
+
+
+class PyMongoIntegration(Integration):
+    identifier = "pymongo"
+    origin = f"auto.db.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        monitoring.register(CommandTracer())
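A minimal usage sketch (not part of the diff): the DSN and the local MongoDB URI are placeholders; `send_default_pii=False` keeps command values redacted by `_strip_pii` above.

import sentry_sdk
from pymongo import MongoClient
from sentry_sdk.integrations.pymongo import PyMongoIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[PyMongoIntegration()],
    traces_sample_rate=1.0,
    send_default_pii=False,  # command values are replaced with "%s" placeholders
)

client = MongoClient("mongodb://localhost:27017")  # assumed local server
with sentry_sdk.start_transaction(name="mongo-demo"):
    client["appdb"]["users"].find_one({"email": "user@example.com"})  # emits a db span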
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pyramid.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pyramid.py
new file mode 100644
index 00000000..d1475ada
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/pyramid.py
@@ -0,0 +1,229 @@
+import functools
+import os
+import sys
+import weakref
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    reraise,
+)
+
+try:
+    from pyramid.httpexceptions import HTTPException
+    from pyramid.request import Request
+except ImportError:
+    raise DidNotEnable("Pyramid not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from pyramid.response import Response
+    from typing import Any
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from typing import Callable
+    from typing import Dict
+    from typing import Optional
+    from webob.cookies import RequestCookies
+    from webob.request import _FieldStorageWithFile
+
+    from sentry_sdk.utils import ExcInfo
+    from sentry_sdk._types import Event, EventProcessor
+
+
+if getattr(Request, "authenticated_userid", None):
+
+    def authenticated_userid(request):
+        # type: (Request) -> Optional[Any]
+        return request.authenticated_userid
+
+else:
+    # bw-compat for pyramid < 1.5
+    from pyramid.security import authenticated_userid  # type: ignore
+
+
+TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern")
+
+
+class PyramidIntegration(Integration):
+    identifier = "pyramid"
+    origin = f"auto.http.{identifier}"
+
+    transaction_style = ""
+
+    def __init__(self, transaction_style="route_name"):
+        # type: (str) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        from pyramid import router
+
+        old_call_view = router._call_view
+
+        @functools.wraps(old_call_view)
+        def sentry_patched_call_view(registry, request, *args, **kwargs):
+            # type: (Any, Request, *Any, **Any) -> Response
+            integration = sentry_sdk.get_client().get_integration(PyramidIntegration)
+            if integration is None:
+                return old_call_view(registry, request, *args, **kwargs)
+
+            _set_transaction_name_and_source(
+                sentry_sdk.get_current_scope(), integration.transaction_style, request
+            )
+            scope = sentry_sdk.get_isolation_scope()
+            scope.add_event_processor(
+                _make_event_processor(weakref.ref(request), integration)
+            )
+
+            return old_call_view(registry, request, *args, **kwargs)
+
+        router._call_view = sentry_patched_call_view
+
+        if hasattr(Request, "invoke_exception_view"):
+            old_invoke_exception_view = Request.invoke_exception_view
+
+            def sentry_patched_invoke_exception_view(self, *args, **kwargs):
+                # type: (Request, *Any, **Any) -> Any
+                rv = old_invoke_exception_view(self, *args, **kwargs)
+
+                if (
+                    self.exc_info
+                    and all(self.exc_info)
+                    and rv.status_int == 500
+                    and sentry_sdk.get_client().get_integration(PyramidIntegration)
+                    is not None
+                ):
+                    _capture_exception(self.exc_info)
+
+                return rv
+
+            Request.invoke_exception_view = sentry_patched_invoke_exception_view
+
+        old_wsgi_call = router.Router.__call__
+
+        @ensure_integration_enabled(PyramidIntegration, old_wsgi_call)
+        def sentry_patched_wsgi_call(self, environ, start_response):
+            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+            def sentry_patched_inner_wsgi_call(environ, start_response):
+                # type: (Dict[str, Any], Callable[..., Any]) -> Any
+                try:
+                    return old_wsgi_call(self, environ, start_response)
+                except Exception:
+                    einfo = sys.exc_info()
+                    _capture_exception(einfo)
+                    reraise(*einfo)
+
+            middleware = SentryWsgiMiddleware(
+                sentry_patched_inner_wsgi_call,
+                span_origin=PyramidIntegration.origin,
+            )
+            return middleware(environ, start_response)
+
+        router.Router.__call__ = sentry_patched_wsgi_call
+
+
+@ensure_integration_enabled(PyramidIntegration)
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    if exc_info[0] is None or issubclass(exc_info[0], HTTPException):
+        return
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "pyramid", "handled": False},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (sentry_sdk.Scope, str, Request) -> None
+    try:
+        name_for_style = {
+            "route_name": request.matched_route.name,
+            "route_pattern": request.matched_route.pattern,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
+        )
+    except Exception:
+        pass
+
+
+class PyramidRequestExtractor(RequestExtractor):
+    def url(self):
+        # type: () -> str
+        return self.request.path_url
+
+    def env(self):
+        # type: () -> Dict[str, str]
+        return self.request.environ
+
+    def cookies(self):
+        # type: () -> RequestCookies
+        return self.request.cookies
+
+    def raw_data(self):
+        # type: () -> str
+        return self.request.text
+
+    def form(self):
+        # type: () -> Dict[str, str]
+        return {
+            key: value
+            for key, value in self.request.POST.items()
+            if not getattr(value, "filename", None)
+        }
+
+    def files(self):
+        # type: () -> Dict[str, _FieldStorageWithFile]
+        return {
+            key: value
+            for key, value in self.request.POST.items()
+            if getattr(value, "filename", None)
+        }
+
+    def size_of_file(self, postdata):
+        # type: (_FieldStorageWithFile) -> int
+        file = postdata.file
+        try:
+            return os.fstat(file.fileno()).st_size
+        except Exception:
+            return 0
+
+
+def _make_event_processor(weak_request, integration):
+    # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor
+    def pyramid_event_processor(event, hint):
+        # type: (Event, Dict[str, Any]) -> Event
+        request = weak_request()
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            PyramidRequestExtractor(request).extract_into_event(event)
+
+        if should_send_default_pii():
+            with capture_internal_exceptions():
+                user_info = event.setdefault("user", {})
+                user_info.setdefault("id", authenticated_userid(request))
+
+        return event
+
+    return pyramid_event_processor
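A minimal usage sketch (not part of the diff): the DSN is a placeholder and the route is illustrative; with `transaction_style="route_pattern"` transactions are named after the matched route's pattern.

import sentry_sdk
from pyramid.config import Configurator
from sentry_sdk.integrations.pyramid import PyramidIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[PyramidIntegration(transaction_style="route_pattern")],
    traces_sample_rate=1.0,
)

with Configurator() as config:
    config.add_route("hello", "/hello/{name}")
    # views registered for this route yield transactions named "/hello/{name}"
    app = config.make_wsgi_app()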
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/quart.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/quart.py
new file mode 100644
index 00000000..51306bb4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/quart.py
@@ -0,0 +1,237 @@
+import asyncio
+import inspect
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+)
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Union
+
+    from sentry_sdk._types import Event, EventProcessor
+
+try:
+    import quart_auth  # type: ignore
+except ImportError:
+    quart_auth = None
+
+try:
+    from quart import (  # type: ignore
+        has_request_context,
+        has_websocket_context,
+        Request,
+        Quart,
+        request,
+        websocket,
+    )
+    from quart.signals import (  # type: ignore
+        got_background_exception,
+        got_request_exception,
+        got_websocket_exception,
+        request_started,
+        websocket_started,
+    )
+except ImportError:
+    raise DidNotEnable("Quart is not installed")
+else:
+    # Quart 0.19 is based on Flask and hence no longer has its own Scaffold
+    try:
+        from quart.scaffold import Scaffold  # type: ignore
+    except ImportError:
+        from flask.sansio.scaffold import Scaffold  # type: ignore
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class QuartIntegration(Integration):
+    identifier = "quart"
+    origin = f"auto.http.{identifier}"
+
+    transaction_style = ""
+
+    def __init__(self, transaction_style="endpoint"):
+        # type: (str) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        request_started.connect(_request_websocket_started)
+        websocket_started.connect(_request_websocket_started)
+        got_background_exception.connect(_capture_exception)
+        got_request_exception.connect(_capture_exception)
+        got_websocket_exception.connect(_capture_exception)
+
+        patch_asgi_app()
+        patch_scaffold_route()
+
+
+def patch_asgi_app():
+    # type: () -> None
+    old_app = Quart.__call__
+
+    async def sentry_patched_asgi_app(self, scope, receive, send):
+        # type: (Any, Any, Any, Any) -> Any
+        if sentry_sdk.get_client().get_integration(QuartIntegration) is None:
+            return await old_app(self, scope, receive, send)
+
+        middleware = SentryAsgiMiddleware(
+            lambda *a, **kw: old_app(self, *a, **kw),
+            span_origin=QuartIntegration.origin,
+        )
+        middleware.__call__ = middleware._run_asgi3
+        return await middleware(scope, receive, send)
+
+    Quart.__call__ = sentry_patched_asgi_app
+
+
+def patch_scaffold_route():
+    # type: () -> None
+    old_route = Scaffold.route
+
+    def _sentry_route(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        old_decorator = old_route(*args, **kwargs)
+
+        def decorator(old_func):
+            # type: (Any) -> Any
+
+            if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction(
+                old_func
+            ):
+
+                @wraps(old_func)
+                @ensure_integration_enabled(QuartIntegration, old_func)
+                def _sentry_func(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    current_scope = sentry_sdk.get_current_scope()
+                    if current_scope.transaction is not None:
+                        current_scope.transaction.update_active_thread()
+
+                    sentry_scope = sentry_sdk.get_isolation_scope()
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.update_active_thread_id()
+
+                    return old_func(*args, **kwargs)
+
+                return old_decorator(_sentry_func)
+
+            return old_decorator(old_func)
+
+        return decorator
+
+    Scaffold.route = _sentry_route
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (sentry_sdk.Scope, str, Request) -> None
+
+    try:
+        name_for_style = {
+            "url": request.url_rule.rule,
+            "endpoint": request.url_rule.endpoint,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
+        )
+    except Exception:
+        pass
+
+
+async def _request_websocket_started(app, **kwargs):
+    # type: (Quart, **Any) -> None
+    integration = sentry_sdk.get_client().get_integration(QuartIntegration)
+    if integration is None:
+        return
+
+    if has_request_context():
+        request_websocket = request._get_current_object()
+    if has_websocket_context():
+        request_websocket = websocket._get_current_object()
+
+    # Set the transaction name here, but rely on ASGI middleware
+    # to actually start the transaction
+    _set_transaction_name_and_source(
+        sentry_sdk.get_current_scope(), integration.transaction_style, request_websocket
+    )
+
+    scope = sentry_sdk.get_isolation_scope()
+    evt_processor = _make_request_event_processor(app, request_websocket, integration)
+    scope.add_event_processor(evt_processor)
+
+
+def _make_request_event_processor(app, request, integration):
+    # type: (Quart, Request, QuartIntegration) -> EventProcessor
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        # if the request is gone we are fine not logging the data from
+        # it.  This might happen if the processor is pushed away to
+        # another thread.
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            # TODO: Figure out what to do with request body. Methods on request
+            # are async, but event processors are not.
+
+            request_info = event.setdefault("request", {})
+            request_info["url"] = request.url
+            request_info["query_string"] = request.query_string
+            request_info["method"] = request.method
+            request_info["headers"] = _filter_headers(dict(request.headers))
+
+            if should_send_default_pii():
+                request_info["env"] = {"REMOTE_ADDR": request.access_route[0]}
+                _add_user_to_event(event)
+
+        return event
+
+    return inner
+
+
+async def _capture_exception(sender, exception, **kwargs):
+    # type: (Quart, Union[ValueError, BaseException], **Any) -> None
+    integration = sentry_sdk.get_client().get_integration(QuartIntegration)
+    if integration is None:
+        return
+
+    event, hint = event_from_exception(
+        exception,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "quart", "handled": False},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _add_user_to_event(event):
+    # type: (Event) -> None
+    if quart_auth is None:
+        return
+
+    user = quart_auth.current_user
+    if user is None:
+        return
+
+    with capture_internal_exceptions():
+        user_info = event.setdefault("user", {})
+
+        user_info["id"] = quart_auth.current_user._auth_id
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/ray.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/ray.py
new file mode 100644
index 00000000..0842b922
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/ray.py
@@ -0,0 +1,141 @@
+import inspect
+import sys
+
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANSTATUS
+from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk.tracing import TransactionSource
+from sentry_sdk.utils import (
+    event_from_exception,
+    logger,
+    package_version,
+    qualname_from_function,
+    reraise,
+)
+
+try:
+    import ray  # type: ignore[import-not-found]
+except ImportError:
+    raise DidNotEnable("Ray not installed.")
+import functools
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Optional
+    from sentry_sdk.utils import ExcInfo
+
+
+def _check_sentry_initialized():
+    # type: () -> None
+    if sentry_sdk.get_client().is_active():
+        return
+
+    logger.debug(
+        "[Tracing] Sentry not initialized in ray cluster worker, performance data will be discarded."
+    )
+
+
+def _patch_ray_remote():
+    # type: () -> None
+    old_remote = ray.remote
+
+    @functools.wraps(old_remote)
+    def new_remote(f, *args, **kwargs):
+        # type: (Callable[..., Any], *Any, **Any) -> Callable[..., Any]
+        if inspect.isclass(f):
+            # Ray Actors
+            # (https://docs.ray.io/en/latest/ray-core/actors.html)
+            # are not supported
+            # (Only Ray Tasks are supported)
+            return old_remote(f, *args, **kwargs)
+
+        def _f(*f_args, _tracing=None, **f_kwargs):
+            # type: (Any, Optional[dict[str, Any]], Any) -> Any
+            """
+            Ray Worker
+            """
+            _check_sentry_initialized()
+
+            transaction = sentry_sdk.continue_trace(
+                _tracing or {},
+                op=OP.QUEUE_TASK_RAY,
+                name=qualname_from_function(f),
+                origin=RayIntegration.origin,
+                source=TransactionSource.TASK,
+            )
+
+            with sentry_sdk.start_transaction(transaction) as transaction:
+                try:
+                    result = f(*f_args, **f_kwargs)
+                    transaction.set_status(SPANSTATUS.OK)
+                except Exception:
+                    transaction.set_status(SPANSTATUS.INTERNAL_ERROR)
+                    exc_info = sys.exc_info()
+                    _capture_exception(exc_info)
+                    reraise(*exc_info)
+
+                return result
+
+        rv = old_remote(_f, *args, **kwargs)
+        old_remote_method = rv.remote
+
+        def _remote_method_with_header_propagation(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            """
+            Ray Client
+            """
+            with sentry_sdk.start_span(
+                op=OP.QUEUE_SUBMIT_RAY,
+                name=qualname_from_function(f),
+                origin=RayIntegration.origin,
+            ) as span:
+                tracing = {
+                    k: v
+                    for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers()
+                }
+                try:
+                    result = old_remote_method(*args, **kwargs, _tracing=tracing)
+                    span.set_status(SPANSTATUS.OK)
+                except Exception:
+                    span.set_status(SPANSTATUS.INTERNAL_ERROR)
+                    exc_info = sys.exc_info()
+                    _capture_exception(exc_info)
+                    reraise(*exc_info)
+
+                return result
+
+        rv.remote = _remote_method_with_header_propagation
+
+        return rv
+
+    ray.remote = new_remote
+
+
+def _capture_exception(exc_info, **kwargs):
+    # type: (ExcInfo, **Any) -> None
+    client = sentry_sdk.get_client()
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=client.options,
+        mechanism={
+            "handled": False,
+            "type": RayIntegration.identifier,
+        },
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+class RayIntegration(Integration):
+    identifier = "ray"
+    origin = f"auto.queue.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = package_version("ray")
+        _check_minimum_version(RayIntegration, version)
+
+        _patch_ray_remote()
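A minimal usage sketch (not part of the diff): the DSN is a placeholder, and the `worker_process_setup_hook` runtime_env option is an assumption about the Ray version in use; workers run in separate processes, so Sentry must be initialized there as well or `_check_sentry_initialized()` logs that performance data is discarded.

import ray
import sentry_sdk
from sentry_sdk.integrations.ray import RayIntegration

def init_sentry():
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[RayIntegration()],
        traces_sample_rate=1.0,
    )

init_sentry()  # driver process
ray.init(runtime_env={"worker_process_setup_hook": init_sentry})  # worker processes

@ray.remote
def add(a, b):
    return a + b

print(ray.get(add.remote(1, 2)))  # trace headers propagate via the _tracing kwarg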
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/__init__.py
new file mode 100644
index 00000000..f4431382
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/__init__.py
@@ -0,0 +1,38 @@
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.redis.consts import _DEFAULT_MAX_DATA_SIZE
+from sentry_sdk.integrations.redis.rb import _patch_rb
+from sentry_sdk.integrations.redis.redis import _patch_redis
+from sentry_sdk.integrations.redis.redis_cluster import _patch_redis_cluster
+from sentry_sdk.integrations.redis.redis_py_cluster_legacy import _patch_rediscluster
+from sentry_sdk.utils import logger
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+
+
+class RedisIntegration(Integration):
+    identifier = "redis"
+
+    def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE, cache_prefixes=None):
+        # type: (int, Optional[list[str]]) -> None
+        self.max_data_size = max_data_size
+        self.cache_prefixes = cache_prefixes if cache_prefixes is not None else []
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        try:
+            from redis import StrictRedis, client
+        except ImportError:
+            raise DidNotEnable("Redis client not installed")
+
+        _patch_redis(StrictRedis, client)
+        _patch_redis_cluster()
+        _patch_rb()
+
+        try:
+            _patch_rediscluster()
+        except Exception:
+            logger.exception("Error occurred while patching `rediscluster` library")
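A minimal configuration sketch (not part of the diff): the DSN is a placeholder and the cache prefix is illustrative; `max_data_size` bounds span descriptions and `cache_prefixes` opts keys into the cache spans produced by the caches module.

import sentry_sdk
from sentry_sdk.integrations.redis import RedisIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[
        RedisIntegration(
            max_data_size=1024,           # truncate long span descriptions
            cache_prefixes=["mycache:"],  # keys with this prefix also emit cache spans
        )
    ],
    traces_sample_rate=1.0,
)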
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/_async_common.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/_async_common.py
new file mode 100644
index 00000000..196e85e7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/_async_common.py
@@ -0,0 +1,108 @@
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN
+from sentry_sdk.integrations.redis.modules.caches import (
+    _compile_cache_span_properties,
+    _set_cache_data,
+)
+from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties
+from sentry_sdk.integrations.redis.utils import (
+    _set_client_data,
+    _set_pipeline_data,
+)
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Union
+    from redis.asyncio.client import Pipeline, StrictRedis
+    from redis.asyncio.cluster import ClusterPipeline, RedisCluster
+
+
+def patch_redis_async_pipeline(
+    pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn
+):
+    # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None
+    old_execute = pipeline_cls.execute
+
+    from sentry_sdk.integrations.redis import RedisIntegration
+
+    async def _sentry_execute(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        if sentry_sdk.get_client().get_integration(RedisIntegration) is None:
+            return await old_execute(self, *args, **kwargs)
+
+        with sentry_sdk.start_span(
+            op=OP.DB_REDIS,
+            name="redis.pipeline.execute",
+            origin=SPAN_ORIGIN,
+        ) as span:
+            with capture_internal_exceptions():
+                set_db_data_fn(span, self)
+                _set_pipeline_data(
+                    span,
+                    is_cluster,
+                    get_command_args_fn,
+                    False if is_cluster else self.is_transaction,
+                    self._command_stack if is_cluster else self.command_stack,
+                )
+
+            return await old_execute(self, *args, **kwargs)
+
+    pipeline_cls.execute = _sentry_execute  # type: ignore
+
+
+def patch_redis_async_client(cls, is_cluster, set_db_data_fn):
+    # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None
+    old_execute_command = cls.execute_command
+
+    from sentry_sdk.integrations.redis import RedisIntegration
+
+    async def _sentry_execute_command(self, name, *args, **kwargs):
+        # type: (Any, str, *Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(RedisIntegration)
+        if integration is None:
+            return await old_execute_command(self, name, *args, **kwargs)
+
+        cache_properties = _compile_cache_span_properties(
+            name,
+            args,
+            kwargs,
+            integration,
+        )
+
+        cache_span = None
+        if cache_properties["is_cache_key"] and cache_properties["op"] is not None:
+            cache_span = sentry_sdk.start_span(
+                op=cache_properties["op"],
+                name=cache_properties["description"],
+                origin=SPAN_ORIGIN,
+            )
+            cache_span.__enter__()
+
+        db_properties = _compile_db_span_properties(integration, name, args)
+
+        db_span = sentry_sdk.start_span(
+            op=db_properties["op"],
+            name=db_properties["description"],
+            origin=SPAN_ORIGIN,
+        )
+        db_span.__enter__()
+
+        set_db_data_fn(db_span, self)
+        _set_client_data(db_span, is_cluster, name, *args)
+
+        value = await old_execute_command(self, name, *args, **kwargs)
+
+        db_span.__exit__(None, None, None)
+
+        if cache_span:
+            _set_cache_data(cache_span, self, cache_properties, value)
+            cache_span.__exit__(None, None, None)
+
+        return value
+
+    cls.execute_command = _sentry_execute_command  # type: ignore
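Once the integration is enabled, `redis.asyncio` clients are covered by these patches with no extra code. A sketch assuming a local Redis server and a placeholder DSN:

import asyncio
import redis.asyncio as redis
import sentry_sdk
from sentry_sdk.integrations.redis import RedisIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[RedisIntegration()],
    traces_sample_rate=1.0,
)

async def main():
    client = redis.Redis(host="localhost", port=6379)  # assumed local server
    with sentry_sdk.start_transaction(name="cache-warmup"):
        await client.set("greeting", "hello")  # db.redis span via _sentry_execute_command
        await client.get("greeting")

asyncio.run(main())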
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/_sync_common.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/_sync_common.py
new file mode 100644
index 00000000..ef10e9e4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/_sync_common.py
@@ -0,0 +1,113 @@
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN
+from sentry_sdk.integrations.redis.modules.caches import (
+    _compile_cache_span_properties,
+    _set_cache_data,
+)
+from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties
+from sentry_sdk.integrations.redis.utils import (
+    _set_client_data,
+    _set_pipeline_data,
+)
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any
+
+
+def patch_redis_pipeline(
+    pipeline_cls,
+    is_cluster,
+    get_command_args_fn,
+    set_db_data_fn,
+):
+    # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None
+    old_execute = pipeline_cls.execute
+
+    from sentry_sdk.integrations.redis import RedisIntegration
+
+    def sentry_patched_execute(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        if sentry_sdk.get_client().get_integration(RedisIntegration) is None:
+            return old_execute(self, *args, **kwargs)
+
+        with sentry_sdk.start_span(
+            op=OP.DB_REDIS,
+            name="redis.pipeline.execute",
+            origin=SPAN_ORIGIN,
+        ) as span:
+            with capture_internal_exceptions():
+                set_db_data_fn(span, self)
+                _set_pipeline_data(
+                    span,
+                    is_cluster,
+                    get_command_args_fn,
+                    False if is_cluster else self.transaction,
+                    self.command_stack,
+                )
+
+            return old_execute(self, *args, **kwargs)
+
+    pipeline_cls.execute = sentry_patched_execute
+
+
+def patch_redis_client(cls, is_cluster, set_db_data_fn):
+    # type: (Any, bool, Callable[[Span, Any], None]) -> None
+    """
+    This function can be used to instrument custom redis client classes or
+    subclasses.
+    """
+    old_execute_command = cls.execute_command
+
+    from sentry_sdk.integrations.redis import RedisIntegration
+
+    def sentry_patched_execute_command(self, name, *args, **kwargs):
+        # type: (Any, str, *Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(RedisIntegration)
+        if integration is None:
+            return old_execute_command(self, name, *args, **kwargs)
+
+        cache_properties = _compile_cache_span_properties(
+            name,
+            args,
+            kwargs,
+            integration,
+        )
+
+        cache_span = None
+        if cache_properties["is_cache_key"] and cache_properties["op"] is not None:
+            cache_span = sentry_sdk.start_span(
+                op=cache_properties["op"],
+                name=cache_properties["description"],
+                origin=SPAN_ORIGIN,
+            )
+            cache_span.__enter__()
+
+        db_properties = _compile_db_span_properties(integration, name, args)
+
+        db_span = sentry_sdk.start_span(
+            op=db_properties["op"],
+            name=db_properties["description"],
+            origin=SPAN_ORIGIN,
+        )
+        db_span.__enter__()
+
+        set_db_data_fn(db_span, self)
+        _set_client_data(db_span, is_cluster, name, *args)
+
+        value = old_execute_command(self, name, *args, **kwargs)
+
+        db_span.__exit__(None, None, None)
+
+        if cache_span:
+            _set_cache_data(cache_span, self, cache_properties, value)
+            cache_span.__exit__(None, None, None)
+
+        return value
+
+    cls.execute_command = sentry_patched_execute_command
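A sketch of what the synchronous patches record, assuming a local Redis server and a placeholder DSN: each `execute_command` call gets its own db span, and a pipeline `execute` gets a single "redis.pipeline.execute" span carrying up to `_MAX_NUM_COMMANDS` sanitized commands.

import redis
import sentry_sdk
from sentry_sdk.integrations.redis import RedisIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[RedisIntegration()],
    traces_sample_rate=1.0,
)

r = redis.Redis(host="localhost", port=6379)  # assumed local server
with sentry_sdk.start_transaction(name="bulk-write"):
    r.get("greeting")        # one db.redis span
    pipe = r.pipeline()
    pipe.set("a", 1)
    pipe.set("b", 2)
    pipe.execute()           # one "redis.pipeline.execute" span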
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/consts.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/consts.py
new file mode 100644
index 00000000..737e8297
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/consts.py
@@ -0,0 +1,19 @@
+SPAN_ORIGIN = "auto.db.redis"
+
+_SINGLE_KEY_COMMANDS = frozenset(
+    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"],
+)
+_MULTI_KEY_COMMANDS = frozenset(
+    [
+        "del",
+        "touch",
+        "unlink",
+        "mget",
+    ],
+)
+_COMMANDS_INCLUDING_SENSITIVE_DATA = [
+    "auth",
+]
+_MAX_NUM_ARGS = 10  # Trim argument lists to this many values
+_MAX_NUM_COMMANDS = 10  # Trim command lists to this many values
+_DEFAULT_MAX_DATA_SIZE = 1024
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/caches.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/caches.py
new file mode 100644
index 00000000..c6fc19f5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/caches.py
@@ -0,0 +1,121 @@
+"""
+Code used for the Caches module in Sentry
+"""
+
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations.redis.utils import _get_safe_key, _key_as_string
+from sentry_sdk.utils import capture_internal_exceptions
+
+GET_COMMANDS = ("get", "mget")
+SET_COMMANDS = ("set", "setex")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from sentry_sdk.integrations.redis import RedisIntegration
+    from sentry_sdk.tracing import Span
+    from typing import Any, Optional
+
+
+def _get_op(name):
+    # type: (str) -> Optional[str]
+    op = None
+    if name.lower() in GET_COMMANDS:
+        op = OP.CACHE_GET
+    elif name.lower() in SET_COMMANDS:
+        op = OP.CACHE_PUT
+
+    return op
+
+
+def _compile_cache_span_properties(redis_command, args, kwargs, integration):
+    # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> dict[str, Any]
+    key = _get_safe_key(redis_command, args, kwargs)
+    key_as_string = _key_as_string(key)
+    keys_as_string = key_as_string.split(", ")
+
+    is_cache_key = False
+    for prefix in integration.cache_prefixes:
+        for kee in keys_as_string:
+            if kee.startswith(prefix):
+                is_cache_key = True
+                break
+        if is_cache_key:
+            break
+
+    value = None
+    if redis_command.lower() in SET_COMMANDS:
+        value = args[-1]
+
+    properties = {
+        "op": _get_op(redis_command),
+        "description": _get_cache_span_description(
+            redis_command, args, kwargs, integration
+        ),
+        "key": key,
+        "key_as_string": key_as_string,
+        "redis_command": redis_command.lower(),
+        "is_cache_key": is_cache_key,
+        "value": value,
+    }
+
+    return properties
+
+
+def _get_cache_span_description(redis_command, args, kwargs, integration):
+    # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> str
+    description = _key_as_string(_get_safe_key(redis_command, args, kwargs))
+
+    data_should_be_truncated = (
+        integration.max_data_size and len(description) > integration.max_data_size
+    )
+    if data_should_be_truncated:
+        description = description[: integration.max_data_size - len("...")] + "..."
+
+    return description
+
+
+def _set_cache_data(span, redis_client, properties, return_value):
+    # type: (Span, Any, dict[str, Any], Optional[Any]) -> None
+    with capture_internal_exceptions():
+        span.set_data(SPANDATA.CACHE_KEY, properties["key"])
+
+        if properties["redis_command"] in GET_COMMANDS:
+            if return_value is not None:
+                span.set_data(SPANDATA.CACHE_HIT, True)
+                size = (
+                    len(str(return_value).encode("utf-8"))
+                    if not isinstance(return_value, bytes)
+                    else len(return_value)
+                )
+                span.set_data(SPANDATA.CACHE_ITEM_SIZE, size)
+            else:
+                span.set_data(SPANDATA.CACHE_HIT, False)
+
+        elif properties["redis_command"] in SET_COMMANDS:
+            if properties["value"] is not None:
+                size = (
+                    len(properties["value"].encode("utf-8"))
+                    if not isinstance(properties["value"], bytes)
+                    else len(properties["value"])
+                )
+                span.set_data(SPANDATA.CACHE_ITEM_SIZE, size)
+
+        try:
+            connection_params = redis_client.connection_pool.connection_kwargs
+        except AttributeError:
+            # If it is a cluster, there is no connection_pool attribute so we
+            # need to get the default node from the cluster instance
+            default_node = redis_client.get_default_node()
+            connection_params = {
+                "host": default_node.host,
+                "port": default_node.port,
+            }
+
+        host = connection_params.get("host")
+        if host is not None:
+            span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, host)
+
+        port = connection_params.get("port")
+        if port is not None:
+            span.set_data(SPANDATA.NETWORK_PEER_PORT, port)
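An illustrative call showing how a command is classified, with the expected values noted in comments (they follow from the helpers above; the prefix and key are made up):

from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.redis.modules.caches import _compile_cache_span_properties

integration = RedisIntegration(cache_prefixes=["myapp:"])
props = _compile_cache_span_properties("GET", ("myapp:user:42",), {}, integration)

# props["op"]            -> OP.CACHE_GET
# props["is_cache_key"]  -> True, because the key starts with a configured prefix
# props["key_as_string"] -> "myapp:user:42"
# props["value"]         -> None (only SET-style commands carry a value)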
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/queries.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/queries.py
new file mode 100644
index 00000000..e0d85a4e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/modules/queries.py
@@ -0,0 +1,68 @@
+"""
+Code used for the Queries module in Sentry
+"""
+
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations.redis.utils import _get_safe_command
+from sentry_sdk.utils import capture_internal_exceptions
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from redis import Redis
+    from sentry_sdk.integrations.redis import RedisIntegration
+    from sentry_sdk.tracing import Span
+    from typing import Any
+
+
+def _compile_db_span_properties(integration, redis_command, args):
+    # type: (RedisIntegration, str, tuple[Any, ...]) -> dict[str, Any]
+    description = _get_db_span_description(integration, redis_command, args)
+
+    properties = {
+        "op": OP.DB_REDIS,
+        "description": description,
+    }
+
+    return properties
+
+
+def _get_db_span_description(integration, command_name, args):
+    # type: (RedisIntegration, str, tuple[Any, ...]) -> str
+    description = command_name
+
+    with capture_internal_exceptions():
+        description = _get_safe_command(command_name, args)
+
+    data_should_be_truncated = (
+        integration.max_data_size and len(description) > integration.max_data_size
+    )
+    if data_should_be_truncated:
+        description = description[: integration.max_data_size - len("...")] + "..."
+
+    return description
+
+
+def _set_db_data_on_span(span, connection_params):
+    # type: (Span, dict[str, Any]) -> None
+    span.set_data(SPANDATA.DB_SYSTEM, "redis")
+
+    db = connection_params.get("db")
+    if db is not None:
+        span.set_data(SPANDATA.DB_NAME, str(db))
+
+    host = connection_params.get("host")
+    if host is not None:
+        span.set_data(SPANDATA.SERVER_ADDRESS, host)
+
+    port = connection_params.get("port")
+    if port is not None:
+        span.set_data(SPANDATA.SERVER_PORT, port)
+
+
+def _set_db_data(span, redis_instance):
+    # type: (Span, Redis[Any]) -> None
+    try:
+        _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs)
+    except AttributeError:
+        pass  # connection_kwargs may be missing in some cases
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/rb.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/rb.py
new file mode 100644
index 00000000..1b3e2e53
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/rb.py
@@ -0,0 +1,32 @@
+"""
+Instrumentation for Redis Blaster (rb)
+
+https://github.com/getsentry/rb
+"""
+
+from sentry_sdk.integrations.redis._sync_common import patch_redis_client
+from sentry_sdk.integrations.redis.modules.queries import _set_db_data
+
+
+def _patch_rb():
+    # type: () -> None
+    try:
+        import rb.clients  # type: ignore
+    except ImportError:
+        pass
+    else:
+        patch_redis_client(
+            rb.clients.FanoutClient,
+            is_cluster=False,
+            set_db_data_fn=_set_db_data,
+        )
+        patch_redis_client(
+            rb.clients.MappingClient,
+            is_cluster=False,
+            set_db_data_fn=_set_db_data,
+        )
+        patch_redis_client(
+            rb.clients.RoutingClient,
+            is_cluster=False,
+            set_db_data_fn=_set_db_data,
+        )
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis.py
new file mode 100644
index 00000000..c92958a3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis.py
@@ -0,0 +1,69 @@
+"""
+Instrumentation for Redis
+
+https://github.com/redis/redis-py
+"""
+
+from sentry_sdk.integrations.redis._sync_common import (
+    patch_redis_client,
+    patch_redis_pipeline,
+)
+from sentry_sdk.integrations.redis.modules.queries import _set_db_data
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Sequence
+
+
+def _get_redis_command_args(command):
+    # type: (Any) -> Sequence[Any]
+    return command[0]
+
+
+def _patch_redis(StrictRedis, client):  # noqa: N803
+    # type: (Any, Any) -> None
+    patch_redis_client(
+        StrictRedis,
+        is_cluster=False,
+        set_db_data_fn=_set_db_data,
+    )
+    patch_redis_pipeline(
+        client.Pipeline,
+        is_cluster=False,
+        get_command_args_fn=_get_redis_command_args,
+        set_db_data_fn=_set_db_data,
+    )
+    try:
+        strict_pipeline = client.StrictPipeline
+    except AttributeError:
+        pass
+    else:
+        patch_redis_pipeline(
+            strict_pipeline,
+            is_cluster=False,
+            get_command_args_fn=_get_redis_command_args,
+            set_db_data_fn=_set_db_data,
+        )
+
+    try:
+        import redis.asyncio
+    except ImportError:
+        pass
+    else:
+        from sentry_sdk.integrations.redis._async_common import (
+            patch_redis_async_client,
+            patch_redis_async_pipeline,
+        )
+
+        patch_redis_async_client(
+            redis.asyncio.client.StrictRedis,
+            is_cluster=False,
+            set_db_data_fn=_set_db_data,
+        )
+        patch_redis_async_pipeline(
+            redis.asyncio.client.Pipeline,
+            False,
+            _get_redis_command_args,
+            set_db_data_fn=_set_db_data,
+        )
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis_cluster.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis_cluster.py
new file mode 100644
index 00000000..80cdc723
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis_cluster.py
@@ -0,0 +1,99 @@
+"""
+Instrumentation for RedisCluster
+This is part of the main redis-py client.
+
+https://github.com/redis/redis-py/blob/master/redis/cluster.py
+"""
+
+from sentry_sdk.integrations.redis._sync_common import (
+    patch_redis_client,
+    patch_redis_pipeline,
+)
+from sentry_sdk.integrations.redis.modules.queries import _set_db_data_on_span
+from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command
+
+from sentry_sdk.utils import capture_internal_exceptions
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from redis import RedisCluster
+    from redis.asyncio.cluster import (
+        RedisCluster as AsyncRedisCluster,
+        ClusterPipeline as AsyncClusterPipeline,
+    )
+    from sentry_sdk.tracing import Span
+
+
+def _set_async_cluster_db_data(span, async_redis_cluster_instance):
+    # type: (Span, AsyncRedisCluster[Any]) -> None
+    default_node = async_redis_cluster_instance.get_default_node()
+    if default_node is not None and default_node.connection_kwargs is not None:
+        _set_db_data_on_span(span, default_node.connection_kwargs)
+
+
+def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance):
+    # type: (Span, AsyncClusterPipeline[Any]) -> None
+    with capture_internal_exceptions():
+        _set_async_cluster_db_data(
+            span,
+            # the AsyncClusterPipeline has always had a `_client` attr but it is private so potentially problematic and mypy
+            # does not recognize it - see https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386
+            async_redis_cluster_pipeline_instance._client,  # type: ignore[attr-defined]
+        )
+
+
+def _set_cluster_db_data(span, redis_cluster_instance):
+    # type: (Span, RedisCluster[Any]) -> None
+    default_node = redis_cluster_instance.get_default_node()
+
+    if default_node is not None:
+        connection_params = {
+            "host": default_node.host,
+            "port": default_node.port,
+        }
+        _set_db_data_on_span(span, connection_params)
+
+
+def _patch_redis_cluster():
+    # type: () -> None
+    """Patches the cluster module on redis SDK (as opposed to rediscluster library)"""
+    try:
+        from redis import RedisCluster, cluster
+    except ImportError:
+        pass
+    else:
+        patch_redis_client(
+            RedisCluster,
+            is_cluster=True,
+            set_db_data_fn=_set_cluster_db_data,
+        )
+        patch_redis_pipeline(
+            cluster.ClusterPipeline,
+            is_cluster=True,
+            get_command_args_fn=_parse_rediscluster_command,
+            set_db_data_fn=_set_cluster_db_data,
+        )
+
+    try:
+        from redis.asyncio import cluster as async_cluster
+    except ImportError:
+        pass
+    else:
+        from sentry_sdk.integrations.redis._async_common import (
+            patch_redis_async_client,
+            patch_redis_async_pipeline,
+        )
+
+        patch_redis_async_client(
+            async_cluster.RedisCluster,
+            is_cluster=True,
+            set_db_data_fn=_set_async_cluster_db_data,
+        )
+        patch_redis_async_pipeline(
+            async_cluster.ClusterPipeline,
+            is_cluster=True,
+            get_command_args_fn=_parse_rediscluster_command,
+            set_db_data_fn=_set_async_cluster_pipeline_db_data,
+        )
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py
new file mode 100644
index 00000000..ad1c2363
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py
@@ -0,0 +1,50 @@
+"""
+Instrumentation for redis-py-cluster
+The project redis-py-cluster is EOL and was integrated into redis-py starting from version 4.1.0 (Dec 26, 2021).
+
+https://github.com/grokzen/redis-py-cluster
+"""
+
+from sentry_sdk.integrations.redis._sync_common import (
+    patch_redis_client,
+    patch_redis_pipeline,
+)
+from sentry_sdk.integrations.redis.modules.queries import _set_db_data
+from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command
+
+
+def _patch_rediscluster():
+    # type: () -> None
+    try:
+        import rediscluster  # type: ignore
+    except ImportError:
+        return
+
+    patch_redis_client(
+        rediscluster.RedisCluster,
+        is_cluster=True,
+        set_db_data_fn=_set_db_data,
+    )
+
+    # up to v1.3.6, __version__ attribute is a tuple
+    # from v2.0.0, __version__ is a string and VERSION a tuple
+    version = getattr(rediscluster, "VERSION", rediscluster.__version__)
+
+    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
+    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
+    if (0, 2, 0) < version < (2, 0, 0):
+        pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
+        patch_redis_client(
+            rediscluster.StrictRedisCluster,
+            is_cluster=True,
+            set_db_data_fn=_set_db_data,
+        )
+    else:
+        pipeline_cls = rediscluster.pipeline.ClusterPipeline
+
+    patch_redis_pipeline(
+        pipeline_cls,
+        is_cluster=True,
+        get_command_args_fn=_parse_rediscluster_command,
+        set_db_data_fn=_set_db_data,
+    )
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/utils.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/utils.py
new file mode 100644
index 00000000..27fae1e8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/redis/utils.py
@@ -0,0 +1,144 @@
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations.redis.consts import (
+    _COMMANDS_INCLUDING_SENSITIVE_DATA,
+    _MAX_NUM_ARGS,
+    _MAX_NUM_COMMANDS,
+    _MULTI_KEY_COMMANDS,
+    _SINGLE_KEY_COMMANDS,
+)
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import SENSITIVE_DATA_SUBSTITUTE
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Optional, Sequence
+    from sentry_sdk.tracing import Span
+
+
+def _get_safe_command(name, args):
+    # type: (str, Sequence[Any]) -> str
+    command_parts = [name]
+
+    for i, arg in enumerate(args):
+        if i > _MAX_NUM_ARGS:
+            break
+
+        name_low = name.lower()
+
+        if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:
+            command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
+            continue
+
+        arg_is_the_key = i == 0
+        if arg_is_the_key:
+            command_parts.append(repr(arg))
+
+        else:
+            if should_send_default_pii():
+                command_parts.append(repr(arg))
+            else:
+                command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
+
+    command = " ".join(command_parts)
+    return command
+
+
+def _safe_decode(key):
+    # type: (Any) -> str
+    if isinstance(key, bytes):
+        try:
+            return key.decode()
+        except UnicodeDecodeError:
+            return ""
+
+    return str(key)
+
+
+def _key_as_string(key):
+    # type: (Any) -> str
+    if isinstance(key, (dict, list, tuple)):
+        key = ", ".join(_safe_decode(x) for x in key)
+    elif isinstance(key, bytes):
+        key = _safe_decode(key)
+    elif key is None:
+        key = ""
+    else:
+        key = str(key)
+
+    return key
+
+
+def _get_safe_key(method_name, args, kwargs):
+    # type: (str, Optional[tuple[Any, ...]], Optional[dict[str, Any]]) -> Optional[tuple[str, ...]]
+    """
+    Get the key (or keys) for the given method_name.
+    The method_name can be a Redis command or a Django caching command.
+    """
+    key = None
+
+    if args is not None and method_name.lower() in _MULTI_KEY_COMMANDS:
+        # for example redis "mget"
+        key = tuple(args)
+
+    elif args is not None and len(args) >= 1:
+        # for example django "set_many/get_many" or redis "get"
+        if isinstance(args[0], (dict, list, tuple)):
+            key = tuple(args[0])
+        else:
+            key = (args[0],)
+
+    elif kwargs is not None and "key" in kwargs:
+        # this is a legacy case for older versions of Django
+        if isinstance(kwargs["key"], (list, tuple)):
+            if len(kwargs["key"]) > 0:
+                key = tuple(kwargs["key"])
+        else:
+            if kwargs["key"] is not None:
+                key = (kwargs["key"],)
+
+    return key
+
+
+def _parse_rediscluster_command(command):
+    # type: (Any) -> Sequence[Any]
+    return command.args
+
+
+def _set_pipeline_data(
+    span, is_cluster, get_command_args_fn, is_transaction, command_stack
+):
+    # type: (Span, bool, Any, bool, Sequence[Any]) -> None
+    span.set_tag("redis.is_cluster", is_cluster)
+    span.set_tag("redis.transaction", is_transaction)
+
+    commands = []
+    for i, arg in enumerate(command_stack):
+        if i >= _MAX_NUM_COMMANDS:
+            break
+
+        command = get_command_args_fn(arg)
+        commands.append(_get_safe_command(command[0], command[1:]))
+
+    span.set_data(
+        "redis.commands",
+        {
+            "count": len(command_stack),
+            "first_ten": commands,
+        },
+    )
+
+
+def _set_client_data(span, is_cluster, name, *args):
+    # type: (Span, bool, str, *Any) -> None
+    span.set_tag("redis.is_cluster", is_cluster)
+    if name:
+        span.set_tag("redis.command", name)
+        span.set_tag(SPANDATA.DB_OPERATION, name)
+
+    if name and args:
+        name_low = name.lower()
+        if (name_low in _SINGLE_KEY_COMMANDS) or (
+            name_low in _MULTI_KEY_COMMANDS and len(args) == 1
+        ):
+            span.set_tag("redis.key", args[0])
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/rq.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/rq.py
new file mode 100644
index 00000000..6d7fcf72
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/rq.py
@@ -0,0 +1,161 @@
+import weakref
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.api import continue_trace
+from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.tracing import TransactionSource
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    format_timestamp,
+    parse_version,
+)
+
+try:
+    from rq.queue import Queue
+    from rq.timeouts import JobTimeoutException
+    from rq.version import VERSION as RQ_VERSION
+    from rq.worker import Worker
+    from rq.job import JobStatus
+except ImportError:
+    raise DidNotEnable("RQ not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable
+
+    from sentry_sdk._types import Event, EventProcessor
+    from sentry_sdk.utils import ExcInfo
+
+    from rq.job import Job
+
+
+class RqIntegration(Integration):
+    identifier = "rq"
+    origin = f"auto.queue.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = parse_version(RQ_VERSION)
+        _check_minimum_version(RqIntegration, version)
+
+        old_perform_job = Worker.perform_job
+
+        @ensure_integration_enabled(RqIntegration, old_perform_job)
+        def sentry_patched_perform_job(self, job, *args, **kwargs):
+            # type: (Any, Job, *Queue, **Any) -> bool
+            with sentry_sdk.new_scope() as scope:
+                scope.clear_breadcrumbs()
+                scope.add_event_processor(_make_event_processor(weakref.ref(job)))
+
+                transaction = continue_trace(
+                    job.meta.get("_sentry_trace_headers") or {},
+                    op=OP.QUEUE_TASK_RQ,
+                    name="unknown RQ task",
+                    source=TransactionSource.TASK,
+                    origin=RqIntegration.origin,
+                )
+
+                with capture_internal_exceptions():
+                    transaction.name = job.func_name
+
+                with sentry_sdk.start_transaction(
+                    transaction,
+                    custom_sampling_context={"rq_job": job},
+                ):
+                    rv = old_perform_job(self, job, *args, **kwargs)
+
+            if self.is_horse:
+                # We're inside of a forked process and RQ is
+                # about to call `os._exit`. Make sure that our
+                # events get sent out.
+                sentry_sdk.get_client().flush()
+
+            return rv
+
+        Worker.perform_job = sentry_patched_perform_job
+
+        old_handle_exception = Worker.handle_exception
+
+        def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
+            # type: (Worker, Any, *Any, **Any) -> Any
+            retry = (
+                hasattr(job, "retries_left")
+                and job.retries_left
+                and job.retries_left > 0
+            )
+            failed = job._status == JobStatus.FAILED or job.is_failed
+            if failed and not retry:
+                _capture_exception(exc_info)
+
+            return old_handle_exception(self, job, *exc_info, **kwargs)
+
+        Worker.handle_exception = sentry_patched_handle_exception
+
+        old_enqueue_job = Queue.enqueue_job
+
+        @ensure_integration_enabled(RqIntegration, old_enqueue_job)
+        def sentry_patched_enqueue_job(self, job, **kwargs):
+            # type: (Queue, Any, **Any) -> Any
+            scope = sentry_sdk.get_current_scope()
+            if scope.span is not None:
+                job.meta["_sentry_trace_headers"] = dict(
+                    scope.iter_trace_propagation_headers()
+                )
+
+            return old_enqueue_job(self, job, **kwargs)
+
+        Queue.enqueue_job = sentry_patched_enqueue_job
+
+        ignore_logger("rq.worker")
+
+
+def _make_event_processor(weak_job):
+    # type: (Callable[[], Job]) -> EventProcessor
+    def event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        job = weak_job()
+        if job is not None:
+            with capture_internal_exceptions():
+                extra = event.setdefault("extra", {})
+                rq_job = {
+                    "job_id": job.id,
+                    "func": job.func_name,
+                    "args": job.args,
+                    "kwargs": job.kwargs,
+                    "description": job.description,
+                }
+
+                if job.enqueued_at:
+                    rq_job["enqueued_at"] = format_timestamp(job.enqueued_at)
+                if job.started_at:
+                    rq_job["started_at"] = format_timestamp(job.started_at)
+
+                extra["rq-job"] = rq_job
+
+        if "exc_info" in hint:
+            with capture_internal_exceptions():
+                if issubclass(hint["exc_info"][0], JobTimeoutException):
+                    event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name]
+
+        return event
+
+    return event_processor
+
+
+def _capture_exception(exc_info, **kwargs):
+    # type: (ExcInfo, **Any) -> None
+    client = sentry_sdk.get_client()
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=client.options,
+        mechanism={"type": "rq", "handled": False},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
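
A minimal sketch of wiring this integration up, assuming a locally reachable Redis instance and a placeholder DSN. With the patches above in place, enqueuing attaches trace headers via `job.meta["_sentry_trace_headers"]`, and workers performing the job produce `queue.task.rq` transactions.

```python
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
from redis import Redis
from rq import Queue

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    integrations=[RqIntegration()],
)


def resize_image(path):
    # In a real project this function must live in a module the worker can import.
    # Raising here would be captured by the patched Worker.handle_exception.
    return path.upper()


queue = Queue(connection=Redis())  # assumes redis://localhost:6379
job = queue.enqueue(resize_image, "cat.png")  # trace headers attached in enqueue_job
```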
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/rust_tracing.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/rust_tracing.py
new file mode 100644
index 00000000..e4c21181
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/rust_tracing.py
@@ -0,0 +1,284 @@
+"""
+This integration ingests tracing data from native extensions written in Rust.
+
+Using it requires additional setup on the Rust side to accept a
+`RustTracingLayer` Python object and register it with the `tracing-subscriber`
+using an adapter from the `pyo3-python-tracing-subscriber` crate. For example:
+```rust
+#[pyfunction]
+pub fn initialize_tracing(py_impl: Bound<'_, PyAny>) {
+    tracing_subscriber::registry()
+        .with(pyo3_python_tracing_subscriber::PythonCallbackLayerBridge::new(py_impl))
+        .init();
+}
+```
+
+Usage in Python would then look like:
+```
+sentry_sdk.init(
+    dsn=sentry_dsn,
+    integrations=[
+        RustTracingIntegration(
+            "demo_rust_extension",
+            demo_rust_extension.initialize_tracing,
+            event_type_mapping=event_type_mapping,
+        )
+    ],
+)
+```
+
+Each native extension requires its own integration.
+"""
+
+import json
+from enum import Enum, auto
+from typing import Any, Callable, Dict, Tuple, Optional
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import Span as SentrySpan
+from sentry_sdk.utils import SENSITIVE_DATA_SUBSTITUTE
+
+TraceState = Optional[Tuple[Optional[SentrySpan], SentrySpan]]
+
+
+class RustTracingLevel(Enum):
+    Trace = "TRACE"
+    Debug = "DEBUG"
+    Info = "INFO"
+    Warn = "WARN"
+    Error = "ERROR"
+
+
+class EventTypeMapping(Enum):
+    Ignore = auto()
+    Exc = auto()
+    Breadcrumb = auto()
+    Event = auto()
+
+
+def tracing_level_to_sentry_level(level):
+    # type: (str) -> sentry_sdk._types.LogLevelStr
+    level = RustTracingLevel(level)
+    if level in (RustTracingLevel.Trace, RustTracingLevel.Debug):
+        return "debug"
+    elif level == RustTracingLevel.Info:
+        return "info"
+    elif level == RustTracingLevel.Warn:
+        return "warning"
+    elif level == RustTracingLevel.Error:
+        return "error"
+    else:
+        # Better this than crashing
+        return "info"
+
+
+def extract_contexts(event: Dict[str, Any]) -> Dict[str, Any]:
+    metadata = event.get("metadata", {})
+    contexts = {}
+
+    location = {}
+    for field in ["module_path", "file", "line"]:
+        if field in metadata:
+            location[field] = metadata[field]
+    if len(location) > 0:
+        contexts["rust_tracing_location"] = location
+
+    fields = {}
+    for field in metadata.get("fields", []):
+        fields[field] = event.get(field)
+    if len(fields) > 0:
+        contexts["rust_tracing_fields"] = fields
+
+    return contexts
+
+
+def process_event(event: Dict[str, Any]) -> None:
+    metadata = event.get("metadata", {})
+
+    logger = metadata.get("target")
+    level = tracing_level_to_sentry_level(metadata.get("level"))
+    message = event.get("message")  # type: sentry_sdk._types.Any
+    contexts = extract_contexts(event)
+
+    sentry_event = {
+        "logger": logger,
+        "level": level,
+        "message": message,
+        "contexts": contexts,
+    }  # type: sentry_sdk._types.Event
+
+    sentry_sdk.capture_event(sentry_event)
+
+
+def process_exception(event: Dict[str, Any]) -> None:
+    process_event(event)
+
+
+def process_breadcrumb(event: Dict[str, Any]) -> None:
+    level = tracing_level_to_sentry_level(event.get("metadata", {}).get("level"))
+    message = event.get("message")
+
+    sentry_sdk.add_breadcrumb(level=level, message=message)
+
+
+def default_span_filter(metadata: Dict[str, Any]) -> bool:
+    return RustTracingLevel(metadata.get("level")) in (
+        RustTracingLevel.Error,
+        RustTracingLevel.Warn,
+        RustTracingLevel.Info,
+    )
+
+
+def default_event_type_mapping(metadata: Dict[str, Any]) -> EventTypeMapping:
+    level = RustTracingLevel(metadata.get("level"))
+    if level == RustTracingLevel.Error:
+        return EventTypeMapping.Exc
+    elif level in (RustTracingLevel.Warn, RustTracingLevel.Info):
+        return EventTypeMapping.Breadcrumb
+    elif level in (RustTracingLevel.Debug, RustTracingLevel.Trace):
+        return EventTypeMapping.Ignore
+    else:
+        return EventTypeMapping.Ignore
+
+
+class RustTracingLayer:
+    def __init__(
+        self,
+        origin: str,
+        event_type_mapping: Callable[
+            [Dict[str, Any]], EventTypeMapping
+        ] = default_event_type_mapping,
+        span_filter: Callable[[Dict[str, Any]], bool] = default_span_filter,
+        include_tracing_fields: Optional[bool] = None,
+    ):
+        self.origin = origin
+        self.event_type_mapping = event_type_mapping
+        self.span_filter = span_filter
+        self.include_tracing_fields = include_tracing_fields
+
+    def _include_tracing_fields(self) -> bool:
+        """
+        By default, the values of tracing fields are not included in case they
+        contain PII. A user may override that by passing `True` for the
+        `include_tracing_fields` keyword argument of this integration or by
+        setting `send_default_pii` to `True` in their Sentry client options.
+        """
+        return (
+            should_send_default_pii()
+            if self.include_tracing_fields is None
+            else self.include_tracing_fields
+        )
+
+    def on_event(self, event: str, _span_state: TraceState) -> None:
+        deserialized_event = json.loads(event)
+        metadata = deserialized_event.get("metadata", {})
+
+        event_type = self.event_type_mapping(metadata)
+        if event_type == EventTypeMapping.Ignore:
+            return
+        elif event_type == EventTypeMapping.Exc:
+            process_exception(deserialized_event)
+        elif event_type == EventTypeMapping.Breadcrumb:
+            process_breadcrumb(deserialized_event)
+        elif event_type == EventTypeMapping.Event:
+            process_event(deserialized_event)
+
+    def on_new_span(self, attrs: str, span_id: str) -> TraceState:
+        attrs = json.loads(attrs)
+        metadata = attrs.get("metadata", {})
+
+        if not self.span_filter(metadata):
+            return None
+
+        module_path = metadata.get("module_path")
+        name = metadata.get("name")
+        message = attrs.get("message")
+
+        if message is not None:
+            sentry_span_name = message
+        elif module_path is not None and name is not None:
+            sentry_span_name = f"{module_path}::{name}"  # noqa: E231
+        elif name is not None:
+            sentry_span_name = name
+        else:
+            sentry_span_name = "<unknown>"
+
+        kwargs = {
+            "op": "function",
+            "name": sentry_span_name,
+            "origin": self.origin,
+        }
+
+        scope = sentry_sdk.get_current_scope()
+        parent_sentry_span = scope.span
+        if parent_sentry_span:
+            sentry_span = parent_sentry_span.start_child(**kwargs)
+        else:
+            sentry_span = scope.start_span(**kwargs)
+
+        fields = metadata.get("fields", [])
+        for field in fields:
+            if self._include_tracing_fields():
+                sentry_span.set_data(field, attrs.get(field))
+            else:
+                sentry_span.set_data(field, SENSITIVE_DATA_SUBSTITUTE)
+
+        scope.span = sentry_span
+        return (parent_sentry_span, sentry_span)
+
+    def on_close(self, span_id: str, span_state: TraceState) -> None:
+        if span_state is None:
+            return
+
+        parent_sentry_span, sentry_span = span_state
+        sentry_span.finish()
+        sentry_sdk.get_current_scope().span = parent_sentry_span
+
+    def on_record(self, span_id: str, values: str, span_state: TraceState) -> None:
+        if span_state is None:
+            return
+        _parent_sentry_span, sentry_span = span_state
+
+        deserialized_values = json.loads(values)
+        for key, value in deserialized_values.items():
+            if self._include_tracing_fields():
+                sentry_span.set_data(key, value)
+            else:
+                sentry_span.set_data(key, SENSITIVE_DATA_SUBSTITUTE)
+
+
+class RustTracingIntegration(Integration):
+    """
+    Ingests tracing data from a Rust native extension's `tracing` instrumentation.
+
+    If a project uses more than one Rust native extension, each one will need
+    its own instance of `RustTracingIntegration` with an initializer function
+    specific to that extension.
+
+    Since all of the setup for this integration requires instance-specific state
+    which is not available in `setup_once()`, setup instead happens in `__init__()`.
+    """
+
+    def __init__(
+        self,
+        identifier: str,
+        initializer: Callable[[RustTracingLayer], None],
+        event_type_mapping: Callable[
+            [Dict[str, Any]], EventTypeMapping
+        ] = default_event_type_mapping,
+        span_filter: Callable[[Dict[str, Any]], bool] = default_span_filter,
+        include_tracing_fields: Optional[bool] = None,
+    ):
+        self.identifier = identifier
+        origin = f"auto.function.rust_tracing.{identifier}"
+        self.tracing_layer = RustTracingLayer(
+            origin, event_type_mapping, span_filter, include_tracing_fields
+        )
+
+        initializer(self.tracing_layer)
+
+    @staticmethod
+    def setup_once() -> None:
+        pass
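
The module docstring above references an `event_type_mapping` without defining it. Below is a minimal sketch of such a callable, built from the `EventTypeMapping` and `RustTracingLevel` enums defined in this file; `demo_rust_extension` is a hypothetical pyo3 extension exposing `initialize_tracing`, and the DSN is a placeholder.

```python
from typing import Any, Dict

import sentry_sdk
from sentry_sdk.integrations.rust_tracing import (
    EventTypeMapping,
    RustTracingIntegration,
    RustTracingLevel,
)

import demo_rust_extension  # hypothetical pyo3 extension (see module docstring)


def event_type_mapping(metadata: Dict[str, Any]) -> EventTypeMapping:
    # Promote WARN to full events instead of breadcrumbs; otherwise stay
    # close to default_event_type_mapping.
    level = RustTracingLevel(metadata.get("level"))
    if level == RustTracingLevel.Error:
        return EventTypeMapping.Exc
    elif level == RustTracingLevel.Warn:
        return EventTypeMapping.Event
    elif level == RustTracingLevel.Info:
        return EventTypeMapping.Breadcrumb
    return EventTypeMapping.Ignore


sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    integrations=[
        RustTracingIntegration(
            "demo_rust_extension",
            demo_rust_extension.initialize_tracing,
            event_type_mapping=event_type_mapping,
        )
    ],
)
```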
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sanic.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sanic.py
new file mode 100644
index 00000000..bd8f1f32
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sanic.py
@@ -0,0 +1,368 @@
+import sys
+import weakref
+from inspect import isawaitable
+from urllib.parse import urlsplit
+
+import sentry_sdk
+from sentry_sdk import continue_trace
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.tracing import TransactionSource
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
+    parse_version,
+    reraise,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Container
+    from typing import Any
+    from typing import Callable
+    from typing import Optional
+    from typing import Union
+    from typing import Dict
+
+    from sanic.request import Request, RequestParameters
+    from sanic.response import BaseHTTPResponse
+
+    from sentry_sdk._types import Event, EventProcessor, ExcInfo, Hint
+    from sanic.router import Route
+
+try:
+    from sanic import Sanic, __version__ as SANIC_VERSION
+    from sanic.exceptions import SanicException
+    from sanic.router import Router
+    from sanic.handlers import ErrorHandler
+except ImportError:
+    raise DidNotEnable("Sanic not installed")
+
+old_error_handler_lookup = ErrorHandler.lookup
+old_handle_request = Sanic.handle_request
+old_router_get = Router.get
+
+try:
+    # This method was introduced in Sanic v21.9
+    old_startup = Sanic._startup
+except AttributeError:
+    pass
+
+
+class SanicIntegration(Integration):
+    identifier = "sanic"
+    origin = f"auto.http.{identifier}"
+    version = None
+
+    def __init__(self, unsampled_statuses=frozenset({404})):
+        # type: (Optional[Container[int]]) -> None
+        """
+        The unsampled_statuses parameter can be used to specify for which HTTP statuses the
+        transactions should not be sent to Sentry. By default, transactions are sent for all
+        HTTP statuses, except 404. Set unsampled_statuses to None to send transactions for all
+        HTTP statuses, including 404.
+        """
+        self._unsampled_statuses = unsampled_statuses or set()
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        SanicIntegration.version = parse_version(SANIC_VERSION)
+        _check_minimum_version(SanicIntegration, SanicIntegration.version)
+
+        if not HAS_REAL_CONTEXTVARS:
+            # We better have contextvars or we're going to leak state between
+            # requests.
+            raise DidNotEnable(
+                "The sanic integration for Sentry requires Python 3.7+ "
+                " or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
+            )
+
+        if SANIC_VERSION.startswith("0.8."):
+            # Sanic 0.8 and older creates a logger named "root" and puts a
+            # stringified version of every exception in there (without exc_info),
+            # which our error deduplication can't detect.
+            #
+            # We explicitly check the version here because it is a very
+            # invasive step to ignore this logger and not necessary in newer
+            # versions at all.
+            #
+            # https://github.com/huge-success/sanic/issues/1332
+            ignore_logger("root")
+
+        if SanicIntegration.version is not None and SanicIntegration.version < (21, 9):
+            _setup_legacy_sanic()
+            return
+
+        _setup_sanic()
+
+
+class SanicRequestExtractor(RequestExtractor):
+    def content_length(self):
+        # type: () -> int
+        if self.request.body is None:
+            return 0
+        return len(self.request.body)
+
+    def cookies(self):
+        # type: () -> Dict[str, str]
+        return dict(self.request.cookies)
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body
+
+    def form(self):
+        # type: () -> RequestParameters
+        return self.request.form
+
+    def is_json(self):
+        # type: () -> bool
+        raise NotImplementedError()
+
+    def json(self):
+        # type: () -> Optional[Any]
+        return self.request.json
+
+    def files(self):
+        # type: () -> RequestParameters
+        return self.request.files
+
+    def size_of_file(self, file):
+        # type: (Any) -> int
+        return len(file.body or ())
+
+
+def _setup_sanic():
+    # type: () -> None
+    Sanic._startup = _startup
+    ErrorHandler.lookup = _sentry_error_handler_lookup
+
+
+def _setup_legacy_sanic():
+    # type: () -> None
+    Sanic.handle_request = _legacy_handle_request
+    Router.get = _legacy_router_get
+    ErrorHandler.lookup = _sentry_error_handler_lookup
+
+
+async def _startup(self):
+    # type: (Sanic) -> None
+    # This happens about as early in the lifecycle as possible, just after the
+    # Request object is created. The body has not yet been consumed.
+    self.signal("http.lifecycle.request")(_context_enter)
+
+    # This happens after the handler is complete. In v21.9 this signal is not
+    # dispatched when there is an exception. Therefore we need to close out
+    # and call _context_exit from the custom exception handler as well.
+    # See https://github.com/sanic-org/sanic/issues/2297
+    self.signal("http.lifecycle.response")(_context_exit)
+
+    # This happens inside of request handling immediately after the route
+    # has been identified by the router.
+    self.signal("http.routing.after")(_set_transaction)
+
+    # The above signals need to be declared before this can be called.
+    await old_startup(self)
+
+
+async def _context_enter(request):
+    # type: (Request) -> None
+    request.ctx._sentry_do_integration = (
+        sentry_sdk.get_client().get_integration(SanicIntegration) is not None
+    )
+
+    if not request.ctx._sentry_do_integration:
+        return
+
+    weak_request = weakref.ref(request)
+    request.ctx._sentry_scope = sentry_sdk.isolation_scope()
+    scope = request.ctx._sentry_scope.__enter__()
+    scope.clear_breadcrumbs()
+    scope.add_event_processor(_make_request_processor(weak_request))
+
+    transaction = continue_trace(
+        dict(request.headers),
+        op=OP.HTTP_SERVER,
+        # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction
+        name=request.path,
+        source=TransactionSource.URL,
+        origin=SanicIntegration.origin,
+    )
+    request.ctx._sentry_transaction = sentry_sdk.start_transaction(
+        transaction
+    ).__enter__()
+
+
+async def _context_exit(request, response=None):
+    # type: (Request, Optional[BaseHTTPResponse]) -> None
+    with capture_internal_exceptions():
+        if not request.ctx._sentry_do_integration:
+            return
+
+        integration = sentry_sdk.get_client().get_integration(SanicIntegration)
+
+        response_status = None if response is None else response.status
+
+        # This capture_internal_exceptions block is intentionally nested here, so that if an
+        # exception happens while ending the transaction, we still attempt to exit the scope.
+        with capture_internal_exceptions():
+            request.ctx._sentry_transaction.set_http_status(response_status)
+            request.ctx._sentry_transaction.sampled &= (
+                isinstance(integration, SanicIntegration)
+                and response_status not in integration._unsampled_statuses
+            )
+            request.ctx._sentry_transaction.__exit__(None, None, None)
+
+        request.ctx._sentry_scope.__exit__(None, None, None)
+
+
+async def _set_transaction(request, route, **_):
+    # type: (Request, Route, **Any) -> None
+    if request.ctx._sentry_do_integration:
+        with capture_internal_exceptions():
+            scope = sentry_sdk.get_current_scope()
+            route_name = route.name.replace(request.app.name, "").strip(".")
+            scope.set_transaction_name(route_name, source=TransactionSource.COMPONENT)
+
+
+def _sentry_error_handler_lookup(self, exception, *args, **kwargs):
+    # type: (Any, Exception, *Any, **Any) -> Optional[object]
+    _capture_exception(exception)
+    old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs)
+
+    if old_error_handler is None:
+        return None
+
+    if sentry_sdk.get_client().get_integration(SanicIntegration) is None:
+        return old_error_handler
+
+    async def sentry_wrapped_error_handler(request, exception):
+        # type: (Request, Exception) -> Any
+        try:
+            response = old_error_handler(request, exception)
+            if isawaitable(response):
+                response = await response
+            return response
+        except Exception:
+            # Report errors that occur in Sanic error handler. These
+            # exceptions will not even show up in Sanic's
+            # `sanic.exceptions` logger.
+            exc_info = sys.exc_info()
+            _capture_exception(exc_info)
+            reraise(*exc_info)
+        finally:
+            # As mentioned in previous comment in _startup, this can be removed
+            # after https://github.com/sanic-org/sanic/issues/2297 is resolved
+            if SanicIntegration.version and SanicIntegration.version == (21, 9):
+                await _context_exit(request)
+
+    return sentry_wrapped_error_handler
+
+
+async def _legacy_handle_request(self, request, *args, **kwargs):
+    # type: (Any, Request, *Any, **Any) -> Any
+    if sentry_sdk.get_client().get_integration(SanicIntegration) is None:
+        return await old_handle_request(self, request, *args, **kwargs)
+
+    weak_request = weakref.ref(request)
+
+    with sentry_sdk.isolation_scope() as scope:
+        scope.clear_breadcrumbs()
+        scope.add_event_processor(_make_request_processor(weak_request))
+
+        response = old_handle_request(self, request, *args, **kwargs)
+        if isawaitable(response):
+            response = await response
+
+        return response
+
+
+def _legacy_router_get(self, *args):
+    # type: (Any, Union[Any, Request]) -> Any
+    rv = old_router_get(self, *args)
+    if sentry_sdk.get_client().get_integration(SanicIntegration) is not None:
+        with capture_internal_exceptions():
+            scope = sentry_sdk.get_isolation_scope()
+            if SanicIntegration.version and SanicIntegration.version >= (21, 3):
+                # Sanic 21.3 and later append the app name to the route name,
+                # so we remove it from the route name to keep the transaction
+                # name consistent across all versions.
+                sanic_app_name = self.ctx.app.name
+                sanic_route = rv[0].name
+
+                if sanic_route.startswith("%s." % sanic_app_name):
+                    # Add 1 to the length of sanic_app_name to account for the dot
+                    # that joins the app name and the route name.
+                    # Format: app_name.route_name
+                    sanic_route = sanic_route[len(sanic_app_name) + 1 :]
+
+                scope.set_transaction_name(
+                    sanic_route, source=TransactionSource.COMPONENT
+                )
+            else:
+                scope.set_transaction_name(
+                    rv[0].__name__, source=TransactionSource.COMPONENT
+                )
+
+    return rv
+
+
+@ensure_integration_enabled(SanicIntegration)
+def _capture_exception(exception):
+    # type: (Union[ExcInfo, BaseException]) -> None
+    with capture_internal_exceptions():
+        event, hint = event_from_exception(
+            exception,
+            client_options=sentry_sdk.get_client().options,
+            mechanism={"type": "sanic", "handled": False},
+        )
+
+        if hint and hasattr(hint["exc_info"][0], "quiet") and hint["exc_info"][0].quiet:
+            return
+
+        sentry_sdk.capture_event(event, hint=hint)
+
+
+def _make_request_processor(weak_request):
+    # type: (Callable[[], Request]) -> EventProcessor
+    def sanic_processor(event, hint):
+        # type: (Event, Optional[Hint]) -> Optional[Event]
+
+        try:
+            if hint and issubclass(hint["exc_info"][0], SanicException):
+                return None
+        except KeyError:
+            pass
+
+        request = weak_request()
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            extractor = SanicRequestExtractor(request)
+            extractor.extract_into_event(event)
+
+            request_info = event["request"]
+            urlparts = urlsplit(request.url)
+
+            request_info["url"] = "%s://%s%s" % (
+                urlparts.scheme,
+                urlparts.netloc,
+                urlparts.path,
+            )
+
+            request_info["query_string"] = urlparts.query
+            request_info["method"] = request.method
+            request_info["env"] = {"REMOTE_ADDR": request.remote_addr}
+            request_info["headers"] = _filter_headers(dict(request.headers))
+
+        return event
+
+    return sanic_processor
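
A minimal sketch of enabling the integration with a custom `unsampled_statuses` set; the 404/405 choice is illustrative and the DSN is a placeholder.

```python
import sentry_sdk
from sentry_sdk.integrations.sanic import SanicIntegration
from sanic import Sanic
from sanic.response import text

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    # Drop transactions for 404 and 405 responses; pass None to keep them all.
    integrations=[SanicIntegration(unsampled_statuses={404, 405})],
)

app = Sanic("my_app")


@app.get("/hello")
async def hello(request):
    # The route name becomes the transaction name via _set_transaction above.
    return text("hello")
```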
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/serverless.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/serverless.py
new file mode 100644
index 00000000..760c07ff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/serverless.py
@@ -0,0 +1,76 @@
+import sys
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk.utils import event_from_exception, reraise
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+    from typing import TypeVar
+    from typing import Union
+    from typing import Optional
+    from typing import overload
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+else:
+
+    def overload(x):
+        # type: (F) -> F
+        return x
+
+
+@overload
+def serverless_function(f, flush=True):
+    # type: (F, bool) -> F
+    pass
+
+
+@overload
+def serverless_function(f=None, flush=True):  # noqa: F811
+    # type: (None, bool) -> Callable[[F], F]
+    pass
+
+
+def serverless_function(f=None, flush=True):  # noqa
+    # type: (Optional[F], bool) -> Union[F, Callable[[F], F]]
+    def wrapper(f):
+        # type: (F) -> F
+        @wraps(f)
+        def inner(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            with sentry_sdk.isolation_scope() as scope:
+                scope.clear_breadcrumbs()
+
+                try:
+                    return f(*args, **kwargs)
+                except Exception:
+                    _capture_and_reraise()
+                finally:
+                    if flush:
+                        sentry_sdk.flush()
+
+        return inner  # type: ignore
+
+    if f is None:
+        return wrapper
+    else:
+        return wrapper(f)
+
+
+def _capture_and_reraise():
+    # type: () -> None
+    exc_info = sys.exc_info()
+    client = sentry_sdk.get_client()
+    if client.is_active():
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "serverless", "handled": False},
+        )
+        sentry_sdk.capture_event(event, hint=hint)
+
+    reraise(*exc_info)
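
A minimal sketch of the decorator in use, both bare and with `flush=False` (placeholder DSN).

```python
import sentry_sdk
from sentry_sdk.integrations.serverless import serverless_function

sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder DSN


@serverless_function
def handler(event, context):
    # Any exception raised here is captured with mechanism "serverless",
    # re-raised, and pending events are flushed before returning.
    return {"statusCode": 200}


@serverless_function(flush=False)
def background_task(payload):
    # Same capture behavior, but without the blocking flush at the end.
    return len(payload)
```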
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/socket.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/socket.py
new file mode 100644
index 00000000..babf61aa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/socket.py
@@ -0,0 +1,96 @@
+import socket
+
+import sentry_sdk
+from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration
+
+if MYPY:
+    from socket import AddressFamily, SocketKind
+    from typing import Tuple, Optional, Union, List
+
+__all__ = ["SocketIntegration"]
+
+
+class SocketIntegration(Integration):
+    identifier = "socket"
+    origin = f"auto.socket.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        """
+        Patches two of the most commonly used socket functions: create_connection and getaddrinfo (DNS resolution).
+        """
+        _patch_create_connection()
+        _patch_getaddrinfo()
+
+
+def _get_span_description(host, port):
+    # type: (Union[bytes, str, None], Union[bytes, str, int, None]) -> str
+
+    try:
+        host = host.decode()  # type: ignore
+    except (UnicodeDecodeError, AttributeError):
+        pass
+
+    try:
+        port = port.decode()  # type: ignore
+    except (UnicodeDecodeError, AttributeError):
+        pass
+
+    description = "%s:%s" % (host, port)  # type: ignore
+    return description
+
+
+def _patch_create_connection():
+    # type: () -> None
+    real_create_connection = socket.create_connection
+
+    def create_connection(
+        address,
+        timeout=socket._GLOBAL_DEFAULT_TIMEOUT,  # type: ignore
+        source_address=None,
+    ):
+        # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket
+        integration = sentry_sdk.get_client().get_integration(SocketIntegration)
+        if integration is None:
+            return real_create_connection(address, timeout, source_address)
+
+        with sentry_sdk.start_span(
+            op=OP.SOCKET_CONNECTION,
+            name=_get_span_description(address[0], address[1]),
+            origin=SocketIntegration.origin,
+        ) as span:
+            span.set_data("address", address)
+            span.set_data("timeout", timeout)
+            span.set_data("source_address", source_address)
+
+            return real_create_connection(
+                address=address, timeout=timeout, source_address=source_address
+            )
+
+    socket.create_connection = create_connection  # type: ignore
+
+
+def _patch_getaddrinfo():
+    # type: () -> None
+    real_getaddrinfo = socket.getaddrinfo
+
+    def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
+        # type: (Union[bytes, str, None], Union[bytes, str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int], Tuple[int, bytes]]]]
+        integration = sentry_sdk.get_client().get_integration(SocketIntegration)
+        if integration is None:
+            return real_getaddrinfo(host, port, family, type, proto, flags)
+
+        with sentry_sdk.start_span(
+            op=OP.SOCKET_DNS,
+            name=_get_span_description(host, port),
+            origin=SocketIntegration.origin,
+        ) as span:
+            span.set_data("host", host)
+            span.set_data("port", port)
+
+            return real_getaddrinfo(host, port, family, type, proto, flags)
+
+    socket.getaddrinfo = getaddrinfo
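
A minimal sketch showing the spans this integration emits. It is not one of the auto-enabled integrations, so it has to be passed explicitly; the DSN is a placeholder.

```python
import socket
import urllib.request

import sentry_sdk
from sentry_sdk.integrations.socket import SocketIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    integrations=[SocketIntegration()],
)

with sentry_sdk.start_transaction(op="task", name="fetch-example"):
    # Emits a DNS-resolution span (patched getaddrinfo) and a connection span
    # (patched create_connection) as children of the transaction.
    socket.getaddrinfo("example.com", 443)
    urllib.request.urlopen("http://example.com", timeout=5).read()
```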
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/__init__.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/__init__.py
new file mode 100644
index 00000000..10d94163
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/__init__.py
@@ -0,0 +1,4 @@
+from sentry_sdk.integrations.spark.spark_driver import SparkIntegration
+from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration
+
+__all__ = ["SparkIntegration", "SparkWorkerIntegration"]
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/spark_driver.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/spark_driver.py
new file mode 100644
index 00000000..fac98535
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/spark_driver.py
@@ -0,0 +1,315 @@
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Optional
+
+    from sentry_sdk._types import Event, Hint
+    from pyspark import SparkContext
+
+
+class SparkIntegration(Integration):
+    identifier = "spark"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _setup_sentry_tracing()
+
+
+def _set_app_properties():
+    # type: () -> None
+    """
+    Set properties on the driver that propagate to worker processes, so that
+    the worker integration has access to app_name and application_id.
+    """
+    from pyspark import SparkContext
+
+    spark_context = SparkContext._active_spark_context
+    if spark_context:
+        spark_context.setLocalProperty(
+            "sentry_app_name",
+            spark_context.appName,
+        )
+        spark_context.setLocalProperty(
+            "sentry_application_id",
+            spark_context.applicationId,
+        )
+
+
+def _start_sentry_listener(sc):
+    # type: (SparkContext) -> None
+    """
+    Start the Java gateway callback server and register a custom `SparkListener`.
+    """
+    from pyspark.java_gateway import ensure_callback_server_started
+
+    gw = sc._gateway
+    ensure_callback_server_started(gw)
+    listener = SentryListener()
+    sc._jsc.sc().addSparkListener(listener)
+
+
+def _add_event_processor(sc):
+    # type: (SparkContext) -> None
+    scope = sentry_sdk.get_isolation_scope()
+
+    @scope.add_event_processor
+    def process_event(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+        with capture_internal_exceptions():
+            if sentry_sdk.get_client().get_integration(SparkIntegration) is None:
+                return event
+
+            if sc._active_spark_context is None:
+                return event
+
+            event.setdefault("user", {}).setdefault("id", sc.sparkUser())
+
+            event.setdefault("tags", {}).setdefault(
+                "executor.id", sc._conf.get("spark.executor.id")
+            )
+            event["tags"].setdefault(
+                "spark-submit.deployMode",
+                sc._conf.get("spark.submit.deployMode"),
+            )
+            event["tags"].setdefault("driver.host", sc._conf.get("spark.driver.host"))
+            event["tags"].setdefault("driver.port", sc._conf.get("spark.driver.port"))
+            event["tags"].setdefault("spark_version", sc.version)
+            event["tags"].setdefault("app_name", sc.appName)
+            event["tags"].setdefault("application_id", sc.applicationId)
+            event["tags"].setdefault("master", sc.master)
+            event["tags"].setdefault("spark_home", sc.sparkHome)
+
+            event.setdefault("extra", {}).setdefault("web_url", sc.uiWebUrl)
+
+        return event
+
+
+def _activate_integration(sc):
+    # type: (SparkContext) -> None
+
+    _start_sentry_listener(sc)
+    _set_app_properties()
+    _add_event_processor(sc)
+
+
+def _patch_spark_context_init():
+    # type: () -> None
+    from pyspark import SparkContext
+
+    spark_context_init = SparkContext._do_init
+
+    @ensure_integration_enabled(SparkIntegration, spark_context_init)
+    def _sentry_patched_spark_context_init(self, *args, **kwargs):
+        # type: (SparkContext, *Any, **Any) -> Optional[Any]
+        rv = spark_context_init(self, *args, **kwargs)
+        _activate_integration(self)
+        return rv
+
+    SparkContext._do_init = _sentry_patched_spark_context_init
+
+
+def _setup_sentry_tracing():
+    # type: () -> None
+    from pyspark import SparkContext
+
+    if SparkContext._active_spark_context is not None:
+        _activate_integration(SparkContext._active_spark_context)
+        return
+    _patch_spark_context_init()
+
+
+class SparkListener:
+    def onApplicationEnd(self, applicationEnd):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onApplicationStart(self, applicationStart):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onBlockManagerAdded(self, blockManagerAdded):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onBlockManagerRemoved(self, blockManagerRemoved):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onBlockUpdated(self, blockUpdated):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onEnvironmentUpdate(self, environmentUpdate):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onExecutorAdded(self, executorAdded):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onExecutorBlacklisted(self, executorBlacklisted):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onExecutorBlacklistedForStage(  # noqa: N802
+        self, executorBlacklistedForStage  # noqa: N803
+    ):
+        # type: (Any) -> None
+        pass
+
+    def onExecutorMetricsUpdate(self, executorMetricsUpdate):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onExecutorRemoved(self, executorRemoved):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onJobEnd(self, jobEnd):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onJobStart(self, jobStart):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onNodeBlacklisted(self, nodeBlacklisted):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onNodeBlacklistedForStage(self, nodeBlacklistedForStage):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onNodeUnblacklisted(self, nodeUnblacklisted):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onOtherEvent(self, event):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onSpeculativeTaskSubmitted(self, speculativeTask):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onTaskEnd(self, taskEnd):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onTaskGettingResult(self, taskGettingResult):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onTaskStart(self, taskStart):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    def onUnpersistRDD(self, unpersistRDD):  # noqa: N802,N803
+        # type: (Any) -> None
+        pass
+
+    class Java:
+        implements = ["org.apache.spark.scheduler.SparkListenerInterface"]
+
+
+class SentryListener(SparkListener):
+    def _add_breadcrumb(
+        self,
+        level,  # type: str
+        message,  # type: str
+        data=None,  # type: Optional[dict[str, Any]]
+    ):
+        # type: (...) -> None
+        sentry_sdk.get_isolation_scope().add_breadcrumb(
+            level=level, message=message, data=data
+        )
+
+    def onJobStart(self, jobStart):  # noqa: N802,N803
+        # type: (Any) -> None
+        sentry_sdk.get_isolation_scope().clear_breadcrumbs()
+
+        message = "Job {} Started".format(jobStart.jobId())
+        self._add_breadcrumb(level="info", message=message)
+        _set_app_properties()
+
+    def onJobEnd(self, jobEnd):  # noqa: N802,N803
+        # type: (Any) -> None
+        level = ""
+        message = ""
+        data = {"result": jobEnd.jobResult().toString()}
+
+        if jobEnd.jobResult().toString() == "JobSucceeded":
+            level = "info"
+            message = "Job {} Ended".format(jobEnd.jobId())
+        else:
+            level = "warning"
+            message = "Job {} Failed".format(jobEnd.jobId())
+
+        self._add_breadcrumb(level=level, message=message, data=data)
+
+    def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
+        # type: (Any) -> None
+        stage_info = stageSubmitted.stageInfo()
+        message = "Stage {} Submitted".format(stage_info.stageId())
+
+        data = {"name": stage_info.name()}
+        attempt_id = _get_attempt_id(stage_info)
+        if attempt_id is not None:
+            data["attemptId"] = attempt_id
+
+        self._add_breadcrumb(level="info", message=message, data=data)
+        _set_app_properties()
+
+    def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
+        # type: (Any) -> None
+        from py4j.protocol import Py4JJavaError  # type: ignore
+
+        stage_info = stageCompleted.stageInfo()
+        message = ""
+        level = ""
+
+        data = {"name": stage_info.name()}
+        attempt_id = _get_attempt_id(stage_info)
+        if attempt_id is not None:
+            data["attemptId"] = attempt_id
+
+        # Use try/except because stageInfo.failureReason() returns a Scala Option.
+        try:
+            data["reason"] = stage_info.failureReason().get()
+            message = "Stage {} Failed".format(stage_info.stageId())
+            level = "warning"
+        except Py4JJavaError:
+            message = "Stage {} Completed".format(stage_info.stageId())
+            level = "info"
+
+        self._add_breadcrumb(level=level, message=message, data=data)
+
+
+def _get_attempt_id(stage_info):
+    # type: (Any) -> Optional[int]
+    try:
+        return stage_info.attemptId()
+    except Exception:
+        pass
+
+    try:
+        return stage_info.attemptNumber()
+    except Exception:
+        pass
+
+    return None
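
A minimal sketch of a driver-side setup. Because `SparkIntegration` patches `SparkContext._do_init`, `sentry_sdk.init` has to run before the `SparkContext` is created unless a context is already active; the DSN is a placeholder.

```python
import sentry_sdk
from sentry_sdk.integrations.spark import SparkIntegration

# Initialize before building the SparkContext so _do_init gets patched first.
sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[SparkIntegration()],
)

from pyspark import SparkConf, SparkContext

conf = SparkConf().setAppName("sentry-demo").setMaster("local[2]")
sc = SparkContext(conf=conf)  # listener, app properties, and event processor attached here

rdd = sc.parallelize(range(10))
print(rdd.sum())  # job start/end breadcrumbs recorded by SentryListener
```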
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/spark_worker.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/spark_worker.py
new file mode 100644
index 00000000..5340a0b3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/spark/spark_worker.py
@@ -0,0 +1,116 @@
+import sys
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    exc_info_from_error,
+    single_exception_from_error_tuple,
+    walk_exception_chain,
+    event_hint_with_exc_info,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Optional
+
+    from sentry_sdk._types import ExcInfo, Event, Hint
+
+
+class SparkWorkerIntegration(Integration):
+    identifier = "spark_worker"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        import pyspark.daemon as original_daemon
+
+        original_daemon.worker_main = _sentry_worker_main
+
+
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    client = sentry_sdk.get_client()
+
+    mechanism = {"type": "spark", "handled": False}
+
+    exc_info = exc_info_from_error(exc_info)
+
+    exc_type, exc_value, tb = exc_info
+    rv = []
+
+    # On exception, the worker calls sys.exit(-1), so we can ignore SystemExit and similar errors.
+    for exc_type, exc_value, tb in walk_exception_chain(exc_info):
+        if exc_type not in (SystemExit, EOFError, ConnectionResetError):
+            rv.append(
+                single_exception_from_error_tuple(
+                    exc_type, exc_value, tb, client.options, mechanism
+                )
+            )
+
+    if rv:
+        rv.reverse()
+        hint = event_hint_with_exc_info(exc_info)
+        event = {"level": "error", "exception": {"values": rv}}  # type: Event
+
+        _tag_task_context()
+
+        sentry_sdk.capture_event(event, hint=hint)
+
+
+def _tag_task_context():
+    # type: () -> None
+    from pyspark.taskcontext import TaskContext
+
+    scope = sentry_sdk.get_isolation_scope()
+
+    @scope.add_event_processor
+    def process_event(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+        with capture_internal_exceptions():
+            integration = sentry_sdk.get_client().get_integration(
+                SparkWorkerIntegration
+            )
+            task_context = TaskContext.get()
+
+            if integration is None or task_context is None:
+                return event
+
+            event.setdefault("tags", {}).setdefault(
+                "stageId", str(task_context.stageId())
+            )
+            event["tags"].setdefault("partitionId", str(task_context.partitionId()))
+            event["tags"].setdefault("attemptNumber", str(task_context.attemptNumber()))
+            event["tags"].setdefault("taskAttemptId", str(task_context.taskAttemptId()))
+
+            if task_context._localProperties:
+                if "sentry_app_name" in task_context._localProperties:
+                    event["tags"].setdefault(
+                        "app_name", task_context._localProperties["sentry_app_name"]
+                    )
+                    event["tags"].setdefault(
+                        "application_id",
+                        task_context._localProperties["sentry_application_id"],
+                    )
+
+                if "callSite.short" in task_context._localProperties:
+                    event.setdefault("extra", {}).setdefault(
+                        "callSite", task_context._localProperties["callSite.short"]
+                    )
+
+        return event
+
+
+def _sentry_worker_main(*args, **kwargs):
+    # type: (*Optional[Any], **Optional[Any]) -> None
+    import pyspark.worker as original_worker
+
+    try:
+        original_worker.main(*args, **kwargs)
+    except SystemExit:
+        if sentry_sdk.get_client().get_integration(SparkWorkerIntegration) is not None:
+            exc_info = sys.exc_info()
+            with capture_internal_exceptions():
+                _capture_exception(exc_info)
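
The worker integration patches `pyspark.daemon.worker_main`, so `sentry_sdk.init` has to run inside the Python worker daemon rather than the driver. Below is a minimal sketch of the custom-daemon-module approach; the `spark.python.*` config keys and the `manager()` entry point are assumptions based on the upstream documentation for this integration, and the DSN is a placeholder.

```python
# sentry_daemon.py -- shipped to the executors and selected via
#   --conf spark.python.use.daemon=true
#   --conf spark.python.daemon.module=sentry_daemon
import sentry_sdk
from sentry_sdk.integrations.spark import SparkWorkerIntegration

import pyspark.daemon as original_daemon

if __name__ == "__main__":
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[SparkWorkerIntegration()],
    )
    original_daemon.manager()  # hand control back to the stock PySpark daemon
```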
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sqlalchemy.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sqlalchemy.py
new file mode 100644
index 00000000..068d3730
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sqlalchemy.py
@@ -0,0 +1,146 @@
+from sentry_sdk.consts import SPANSTATUS, SPANDATA
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    parse_version,
+)
+
+try:
+    from sqlalchemy.engine import Engine  # type: ignore
+    from sqlalchemy.event import listen  # type: ignore
+    from sqlalchemy import __version__ as SQLALCHEMY_VERSION  # type: ignore
+except ImportError:
+    raise DidNotEnable("SQLAlchemy not installed.")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import ContextManager
+    from typing import Optional
+
+    from sentry_sdk.tracing import Span
+
+
+class SqlalchemyIntegration(Integration):
+    identifier = "sqlalchemy"
+    origin = f"auto.db.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = parse_version(SQLALCHEMY_VERSION)
+        _check_minimum_version(SqlalchemyIntegration, version)
+
+        listen(Engine, "before_cursor_execute", _before_cursor_execute)
+        listen(Engine, "after_cursor_execute", _after_cursor_execute)
+        listen(Engine, "handle_error", _handle_error)
+
+
+@ensure_integration_enabled(SqlalchemyIntegration)
+def _before_cursor_execute(
+    conn, cursor, statement, parameters, context, executemany, *args
+):
+    # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
+    ctx_mgr = record_sql_queries(
+        cursor,
+        statement,
+        parameters,
+        paramstyle=context and context.dialect and context.dialect.paramstyle or None,
+        executemany=executemany,
+        span_origin=SqlalchemyIntegration.origin,
+    )
+    context._sentry_sql_span_manager = ctx_mgr
+
+    span = ctx_mgr.__enter__()
+
+    if span is not None:
+        _set_db_data(span, conn)
+        context._sentry_sql_span = span
+
+
+@ensure_integration_enabled(SqlalchemyIntegration)
+def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
+    # type: (Any, Any, Any, Any, Any, *Any) -> None
+    ctx_mgr = getattr(
+        context, "_sentry_sql_span_manager", None
+    )  # type: Optional[ContextManager[Any]]
+
+    if ctx_mgr is not None:
+        context._sentry_sql_span_manager = None
+        ctx_mgr.__exit__(None, None, None)
+
+    span = getattr(context, "_sentry_sql_span", None)  # type: Optional[Span]
+    if span is not None:
+        with capture_internal_exceptions():
+            add_query_source(span)
+
+
+def _handle_error(context, *args):
+    # type: (Any, *Any) -> None
+    execution_context = context.execution_context
+    if execution_context is None:
+        return
+
+    span = getattr(execution_context, "_sentry_sql_span", None)  # type: Optional[Span]
+
+    if span is not None:
+        span.set_status(SPANSTATUS.INTERNAL_ERROR)
+
+    # _after_cursor_execute does not get called for crashing SQL statements.
+    # Judging from the SQLAlchemy codebase, any error coming into this
+    # handler appears to be fatal.
+    ctx_mgr = getattr(
+        execution_context, "_sentry_sql_span_manager", None
+    )  # type: Optional[ContextManager[Any]]
+
+    if ctx_mgr is not None:
+        execution_context._sentry_sql_span_manager = None
+        ctx_mgr.__exit__(None, None, None)
+
+
+# See: https://docs.sqlalchemy.org/en/20/dialects/index.html
+def _get_db_system(name):
+    # type: (str) -> Optional[str]
+    name = str(name)
+
+    if "sqlite" in name:
+        return "sqlite"
+
+    if "postgres" in name:
+        return "postgresql"
+
+    if "mariadb" in name:
+        return "mariadb"
+
+    if "mysql" in name:
+        return "mysql"
+
+    if "oracle" in name:
+        return "oracle"
+
+    return None
+
+
+def _set_db_data(span, conn):
+    # type: (Span, Any) -> None
+    db_system = _get_db_system(conn.engine.name)
+    if db_system is not None:
+        span.set_data(SPANDATA.DB_SYSTEM, db_system)
+
+    if conn.engine.url is None:
+        return
+
+    db_name = conn.engine.url.database
+    if db_name is not None:
+        span.set_data(SPANDATA.DB_NAME, db_name)
+
+    server_address = conn.engine.url.host
+    if server_address is not None:
+        span.set_data(SPANDATA.SERVER_ADDRESS, server_address)
+
+    server_port = conn.engine.url.port
+    if server_port is not None:
+        span.set_data(SPANDATA.SERVER_PORT, server_port)
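+
+# Illustrative usage sketch (comment only, not executed by this module): the
+# integration is enabled via `sentry_sdk.init`, after which the event
+# listeners registered in `setup_once` record a span per SQL statement.
+# The DSN and database URL below are placeholders.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
+#     from sqlalchemy import create_engine, text
+#
+#     sentry_sdk.init(
+#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
+#         traces_sample_rate=1.0,
+#         integrations=[SqlalchemyIntegration()],
+#     )
+#
+#     engine = create_engine("sqlite:///:memory:")
+#     with engine.connect() as connection:
+#         connection.execute(text("SELECT 1"))  # recorded as a db span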
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/starlette.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/starlette.py
new file mode 100644
index 00000000..dbb47dff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/starlette.py
@@ -0,0 +1,740 @@
+import asyncio
+import functools
+import warnings
+from collections.abc import Set
+from copy import deepcopy
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import (
+    DidNotEnable,
+    Integration,
+    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
+)
+from sentry_sdk.integrations._wsgi_common import (
+    DEFAULT_HTTP_METHODS_TO_CAPTURE,
+    HttpCodeRangeContainer,
+    _is_json_content_type,
+    request_body_within_bounds,
+)
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import (
+    SOURCE_FOR_STYLE,
+    TransactionSource,
+)
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    logger,
+    parse_version,
+    transaction_from_function,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Awaitable, Callable, Container, Dict, Optional, Tuple, Union
+
+    from sentry_sdk._types import Event, HttpStatusCodeRange
+
+try:
+    import starlette  # type: ignore
+    from starlette import __version__ as STARLETTE_VERSION
+    from starlette.applications import Starlette  # type: ignore
+    from starlette.datastructures import UploadFile  # type: ignore
+    from starlette.middleware import Middleware  # type: ignore
+    from starlette.middleware.authentication import (  # type: ignore
+        AuthenticationMiddleware,
+    )
+    from starlette.requests import Request  # type: ignore
+    from starlette.routing import Match  # type: ignore
+    from starlette.types import ASGIApp, Receive, Scope as StarletteScope, Send  # type: ignore
+except ImportError:
+    raise DidNotEnable("Starlette is not installed")
+
+try:
+    # Starlette 0.20
+    from starlette.middleware.exceptions import ExceptionMiddleware  # type: ignore
+except ImportError:
+    # Starlette 0.19.1
+    from starlette.exceptions import ExceptionMiddleware  # type: ignore
+
+try:
+    # Optional dependency of Starlette to parse form data.
+    try:
+        # python-multipart 0.0.13 and later
+        import python_multipart as multipart  # type: ignore
+    except ImportError:
+        # python-multipart 0.0.12 and earlier
+        import multipart  # type: ignore
+except ImportError:
+    multipart = None
+
+
+_DEFAULT_TRANSACTION_NAME = "generic Starlette request"
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class StarletteIntegration(Integration):
+    identifier = "starlette"
+    origin = f"auto.http.{identifier}"
+
+    transaction_style = ""
+
+    def __init__(
+        self,
+        transaction_style="url",  # type: str
+        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Union[Set[int], list[HttpStatusCodeRange], None]
+        middleware_spans=True,  # type: bool
+        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: tuple[str, ...]
+    ):
+        # type: (...) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+        self.middleware_spans = middleware_spans
+        self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))
+
+        if isinstance(failed_request_status_codes, Set):
+            self.failed_request_status_codes = (
+                failed_request_status_codes
+            )  # type: Container[int]
+        else:
+            warnings.warn(
+                "Passing a list or None for failed_request_status_codes is deprecated. "
+                "Please pass a set of int instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+            if failed_request_status_codes is None:
+                self.failed_request_status_codes = _DEFAULT_FAILED_REQUEST_STATUS_CODES
+            else:
+                self.failed_request_status_codes = HttpCodeRangeContainer(
+                    failed_request_status_codes
+                )
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = parse_version(STARLETTE_VERSION)
+
+        if version is None:
+            raise DidNotEnable(
+                "Unparsable Starlette version: {}".format(STARLETTE_VERSION)
+            )
+
+        patch_middlewares()
+        patch_asgi_app()
+        patch_request_response()
+
+        if version >= (0, 24):
+            patch_templates()
+
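+# Illustrative configuration sketch (comment only): the keyword arguments
+# mirror the constructor options defined above; the DSN and the specific
+# values are placeholders.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.starlette import StarletteIntegration
+#
+#     sentry_sdk.init(
+#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
+#         traces_sample_rate=1.0,
+#         integrations=[
+#             StarletteIntegration(
+#                 transaction_style="endpoint",
+#                 failed_request_status_codes={403, *range(500, 600)},
+#                 middleware_spans=True,
+#                 http_methods_to_capture=("GET", "POST"),
+#             )
+#         ],
+#     )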
+
+def _enable_span_for_middleware(middleware_class):
+    # type: (Any) -> type
+    old_call = middleware_class.__call__
+
+    async def _create_span_call(app, scope, receive, send, **kwargs):
+        # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], **Any) -> None
+        integration = sentry_sdk.get_client().get_integration(StarletteIntegration)
+        if integration is None or not integration.middleware_spans:
+            return await old_call(app, scope, receive, send, **kwargs)
+
+        middleware_name = app.__class__.__name__
+
+        # Update transaction name with middleware name
+        name, source = _get_transaction_from_middleware(app, scope, integration)
+        if name is not None:
+            sentry_sdk.get_current_scope().set_transaction_name(
+                name,
+                source=source,
+            )
+
+        with sentry_sdk.start_span(
+            op=OP.MIDDLEWARE_STARLETTE,
+            name=middleware_name,
+            origin=StarletteIntegration.origin,
+        ) as middleware_span:
+            middleware_span.set_tag("starlette.middleware_name", middleware_name)
+
+            # Creating spans for the "receive" callback
+            async def _sentry_receive(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                with sentry_sdk.start_span(
+                    op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
+                    name=getattr(receive, "__qualname__", str(receive)),
+                    origin=StarletteIntegration.origin,
+                ) as span:
+                    span.set_tag("starlette.middleware_name", middleware_name)
+                    return await receive(*args, **kwargs)
+
+            receive_name = getattr(receive, "__name__", str(receive))
+            receive_patched = receive_name == "_sentry_receive"
+            new_receive = _sentry_receive if not receive_patched else receive
+
+            # Creating spans for the "send" callback
+            async def _sentry_send(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                with sentry_sdk.start_span(
+                    op=OP.MIDDLEWARE_STARLETTE_SEND,
+                    name=getattr(send, "__qualname__", str(send)),
+                    origin=StarletteIntegration.origin,
+                ) as span:
+                    span.set_tag("starlette.middleware_name", middleware_name)
+                    return await send(*args, **kwargs)
+
+            send_name = getattr(send, "__name__", str(send))
+            send_patched = send_name == "_sentry_send"
+            new_send = _sentry_send if not send_patched else send
+
+            return await old_call(app, scope, new_receive, new_send, **kwargs)
+
+    not_yet_patched = old_call.__name__ not in [
+        "_create_span_call",
+        "_sentry_authenticationmiddleware_call",
+        "_sentry_exceptionmiddleware_call",
+    ]
+
+    if not_yet_patched:
+        middleware_class.__call__ = _create_span_call
+
+    return middleware_class
+
+
+@ensure_integration_enabled(StarletteIntegration)
+def _capture_exception(exception, handled=False):
+    # type: (BaseException, bool) -> None
+    event, hint = event_from_exception(
+        exception,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": StarletteIntegration.identifier, "handled": handled},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def patch_exception_middleware(middleware_class):
+    # type: (Any) -> None
+    """
+    Capture all exceptions raised in the Starlette app and
+    also extract user information.
+    """
+    old_middleware_init = middleware_class.__init__
+
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
+
+    if not_yet_patched:
+
+        def _sentry_middleware_init(self, *args, **kwargs):
+            # type: (Any, *Any, **Any) -> None
+            old_middleware_init(self, *args, **kwargs)
+
+            # Patch existing exception handlers
+            old_handlers = self._exception_handlers.copy()
+
+            async def _sentry_patched_exception_handler(self, *args, **kwargs):
+                # type: (Any, *Any, **Any) -> None
+                integration = sentry_sdk.get_client().get_integration(
+                    StarletteIntegration
+                )
+
+                exp = args[0]
+
+                if integration is not None:
+                    is_http_server_error = (
+                        hasattr(exp, "status_code")
+                        and isinstance(exp.status_code, int)
+                        and exp.status_code in integration.failed_request_status_codes
+                    )
+                    if is_http_server_error:
+                        _capture_exception(exp, handled=True)
+
+                # Find a matching handler
+                old_handler = None
+                for cls in type(exp).__mro__:
+                    if cls in old_handlers:
+                        old_handler = old_handlers[cls]
+                        break
+
+                if old_handler is None:
+                    return
+
+                if _is_async_callable(old_handler):
+                    return await old_handler(self, *args, **kwargs)
+                else:
+                    return old_handler(self, *args, **kwargs)
+
+            for key in self._exception_handlers.keys():
+                self._exception_handlers[key] = _sentry_patched_exception_handler
+
+        middleware_class.__init__ = _sentry_middleware_init
+
+        old_call = middleware_class.__call__
+
+        async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
+            # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            # Also add the user that was set by the AuthenticationMiddleware that ran
+            # before this middleware. The authentication middleware sets the user in the
+            # ASGI scope and then (in the same function) calls this exception middleware.
+            # If there is no exception (or no handler for the type of exception that
+            # occurs), the exception bubbles up, the user information is added to the
+            # Sentry scope by the auth middleware patch, and the ASGI middleware sends
+            # everything to Sentry as usual. But if this exception middleware has a
+            # handler for the exception, it sends the exception to Sentry directly, so
+            # the user information must already be on the scope at this point.
+            _add_user_to_sentry_scope(scope)
+            await old_call(self, scope, receive, send)
+
+        middleware_class.__call__ = _sentry_exceptionmiddleware_call
+
+
+@ensure_integration_enabled(StarletteIntegration)
+def _add_user_to_sentry_scope(scope):
+    # type: (Dict[str, Any]) -> None
+    """
+    Extracts user information from the ASGI scope and
+    adds it to Sentry's scope.
+    """
+    if "user" not in scope:
+        return
+
+    if not should_send_default_pii():
+        return
+
+    user_info = {}  # type: Dict[str, Any]
+    starlette_user = scope["user"]
+
+    username = getattr(starlette_user, "username", None)
+    if username:
+        user_info.setdefault("username", starlette_user.username)
+
+    user_id = getattr(starlette_user, "id", None)
+    if user_id:
+        user_info.setdefault("id", starlette_user.id)
+
+    email = getattr(starlette_user, "email", None)
+    if email:
+        user_info.setdefault("email", starlette_user.email)
+
+    sentry_scope = sentry_sdk.get_isolation_scope()
+    sentry_scope.user = user_info
+
+
+def patch_authentication_middleware(middleware_class):
+    # type: (Any) -> None
+    """
+    Add user information to Sentry scope.
+    """
+    old_call = middleware_class.__call__
+
+    not_yet_patched = "_sentry_authenticationmiddleware_call" not in str(old_call)
+
+    if not_yet_patched:
+
+        async def _sentry_authenticationmiddleware_call(self, scope, receive, send):
+            # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            await old_call(self, scope, receive, send)
+            _add_user_to_sentry_scope(scope)
+
+        middleware_class.__call__ = _sentry_authenticationmiddleware_call
+
+
+def patch_middlewares():
+    # type: () -> None
+    """
+    Patches Starlette's `Middleware` class to record
+    spans for every middleware invoked.
+    """
+    old_middleware_init = Middleware.__init__
+
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
+
+    if not_yet_patched:
+
+        def _sentry_middleware_init(self, cls, *args, **kwargs):
+            # type: (Any, Any, *Any, **Any) -> None
+            if cls == SentryAsgiMiddleware:
+                return old_middleware_init(self, cls, *args, **kwargs)
+
+            span_enabled_cls = _enable_span_for_middleware(cls)
+            old_middleware_init(self, span_enabled_cls, *args, **kwargs)
+
+            if cls == AuthenticationMiddleware:
+                patch_authentication_middleware(cls)
+
+            if cls == ExceptionMiddleware:
+                patch_exception_middleware(cls)
+
+        Middleware.__init__ = _sentry_middleware_init
+
+
+def patch_asgi_app():
+    # type: () -> None
+    """
+    Instrument the Starlette ASGI app using the SentryAsgiMiddleware.
+    """
+    old_app = Starlette.__call__
+
+    async def _sentry_patched_asgi_app(self, scope, receive, send):
+        # type: (Starlette, StarletteScope, Receive, Send) -> None
+        integration = sentry_sdk.get_client().get_integration(StarletteIntegration)
+        if integration is None:
+            return await old_app(self, scope, receive, send)
+
+        middleware = SentryAsgiMiddleware(
+            lambda *a, **kw: old_app(self, *a, **kw),
+            mechanism_type=StarletteIntegration.identifier,
+            transaction_style=integration.transaction_style,
+            span_origin=StarletteIntegration.origin,
+            http_methods_to_capture=(
+                integration.http_methods_to_capture
+                if integration
+                else DEFAULT_HTTP_METHODS_TO_CAPTURE
+            ),
+        )
+
+        middleware.__call__ = middleware._run_asgi3
+        return await middleware(scope, receive, send)
+
+    Starlette.__call__ = _sentry_patched_asgi_app
+
+
+# This was vendored in from Starlette to support Starlette 0.19.1 because
+# this function was only introduced in 0.20.x
+def _is_async_callable(obj):
+    # type: (Any) -> bool
+    while isinstance(obj, functools.partial):
+        obj = obj.func
+
+    return asyncio.iscoroutinefunction(obj) or (
+        callable(obj) and asyncio.iscoroutinefunction(obj.__call__)
+    )
+
+
+def patch_request_response():
+    # type: () -> None
+    old_request_response = starlette.routing.request_response
+
+    def _sentry_request_response(func):
+        # type: (Callable[[Any], Any]) -> ASGIApp
+        old_func = func
+
+        is_coroutine = _is_async_callable(old_func)
+        if is_coroutine:
+
+            async def _sentry_async_func(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                integration = sentry_sdk.get_client().get_integration(
+                    StarletteIntegration
+                )
+                if integration is None:
+                    return await old_func(*args, **kwargs)
+
+                request = args[0]
+
+                _set_transaction_name_and_source(
+                    sentry_sdk.get_current_scope(),
+                    integration.transaction_style,
+                    request,
+                )
+
+                sentry_scope = sentry_sdk.get_isolation_scope()
+                extractor = StarletteRequestExtractor(request)
+                info = await extractor.extract_request_info()
+
+                def _make_request_event_processor(req, integration):
+                    # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
+                    def event_processor(event, hint):
+                        # type: (Event, Dict[str, Any]) -> Event
+
+                        # Add info from request to event
+                        request_info = event.get("request", {})
+                        if info:
+                            if "cookies" in info:
+                                request_info["cookies"] = info["cookies"]
+                            if "data" in info:
+                                request_info["data"] = info["data"]
+                        event["request"] = deepcopy(request_info)
+
+                        return event
+
+                    return event_processor
+
+                sentry_scope._name = StarletteIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
+
+                return await old_func(*args, **kwargs)
+
+            func = _sentry_async_func
+
+        else:
+
+            @functools.wraps(old_func)
+            def _sentry_sync_func(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                integration = sentry_sdk.get_client().get_integration(
+                    StarletteIntegration
+                )
+                if integration is None:
+                    return old_func(*args, **kwargs)
+
+                current_scope = sentry_sdk.get_current_scope()
+                if current_scope.transaction is not None:
+                    current_scope.transaction.update_active_thread()
+
+                sentry_scope = sentry_sdk.get_isolation_scope()
+                if sentry_scope.profile is not None:
+                    sentry_scope.profile.update_active_thread_id()
+
+                request = args[0]
+
+                _set_transaction_name_and_source(
+                    sentry_scope, integration.transaction_style, request
+                )
+
+                extractor = StarletteRequestExtractor(request)
+                cookies = extractor.extract_cookies_from_request()
+
+                def _make_request_event_processor(req, integration):
+                    # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
+                    def event_processor(event, hint):
+                        # type: (Event, dict[str, Any]) -> Event
+
+                        # Extract information from request
+                        request_info = event.get("request", {})
+                        if cookies:
+                            request_info["cookies"] = cookies
+
+                        event["request"] = deepcopy(request_info)
+
+                        return event
+
+                    return event_processor
+
+                sentry_scope._name = StarletteIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
+
+                return old_func(*args, **kwargs)
+
+            func = _sentry_sync_func
+
+        return old_request_response(func)
+
+    starlette.routing.request_response = _sentry_request_response
+
+
+def patch_templates():
+    # type: () -> None
+
+    # If markupsafe is not installed, then Jinja2 is not installed either
+    # (markupsafe is a dependency of Jinja2), so there is no need to patch
+    # the Jinja2Templates class.
+    try:
+        from markupsafe import Markup
+    except ImportError:
+        return  # Nothing to do
+
+    from starlette.templating import Jinja2Templates  # type: ignore
+
+    old_jinja2templates_init = Jinja2Templates.__init__
+
+    not_yet_patched = "_sentry_jinja2templates_init" not in str(
+        old_jinja2templates_init
+    )
+
+    if not_yet_patched:
+
+        def _sentry_jinja2templates_init(self, *args, **kwargs):
+            # type: (Jinja2Templates, *Any, **Any) -> None
+            def add_sentry_trace_meta(request):
+                # type: (Request) -> Dict[str, Any]
+                trace_meta = Markup(
+                    sentry_sdk.get_current_scope().trace_propagation_meta()
+                )
+                return {
+                    "sentry_trace_meta": trace_meta,
+                }
+
+            kwargs.setdefault("context_processors", [])
+
+            if add_sentry_trace_meta not in kwargs["context_processors"]:
+                kwargs["context_processors"].append(add_sentry_trace_meta)
+
+            return old_jinja2templates_init(self, *args, **kwargs)
+
+        Jinja2Templates.__init__ = _sentry_jinja2templates_init
+
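+# Illustrative template usage (comment only): once `Jinja2Templates` is patched,
+# every template rendered through it receives a `sentry_trace_meta` context
+# variable that expands to Sentry's trace propagation <meta> tags, e.g.:
+#
+#     <head>
+#       {{ sentry_trace_meta }}
+#     </head>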
+
+class StarletteRequestExtractor:
+    """
+    Extracts useful information from the Starlette request
+    (like form data or cookies) and adds it to the Sentry event.
+    """
+
+    request = None  # type: Request
+
+    def __init__(self, request):
+        # type: (StarletteRequestExtractor, Request) -> None
+        self.request = request
+
+    def extract_cookies_from_request(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        cookies = None  # type: Optional[Dict[str, Any]]
+        if should_send_default_pii():
+            cookies = self.cookies()
+
+        return cookies
+
+    async def extract_request_info(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        client = sentry_sdk.get_client()
+
+        request_info = {}  # type: Dict[str, Any]
+
+        with capture_internal_exceptions():
+            # Add cookies
+            if should_send_default_pii():
+                request_info["cookies"] = self.cookies()
+
+            # If there is no body, just return the cookies
+            content_length = await self.content_length()
+            if not content_length:
+                return request_info
+
+            # Add annotation if body is too big
+            if content_length and not request_body_within_bounds(
+                client, content_length
+            ):
+                request_info["data"] = AnnotatedValue.removed_because_over_size_limit()
+                return request_info
+
+            # Add JSON body, if it is a JSON request
+            json = await self.json()
+            if json:
+                request_info["data"] = json
+                return request_info
+
+            # Add form as key/value pairs, if request has form data
+            form = await self.form()
+            if form:
+                form_data = {}
+                for key, val in form.items():
+                    is_file = isinstance(val, UploadFile)
+                    form_data[key] = (
+                        val
+                        if not is_file
+                        else AnnotatedValue.removed_because_raw_data()
+                    )
+
+                request_info["data"] = form_data
+                return request_info
+
+            # Raw data, do not add body just an annotation
+            request_info["data"] = AnnotatedValue.removed_because_raw_data()
+            return request_info
+
+    async def content_length(self):
+        # type: (StarletteRequestExtractor) -> Optional[int]
+        if "content-length" in self.request.headers:
+            return int(self.request.headers["content-length"])
+
+        return None
+
+    def cookies(self):
+        # type: (StarletteRequestExtractor) -> Dict[str, Any]
+        return self.request.cookies
+
+    async def form(self):
+        # type: (StarletteRequestExtractor) -> Any
+        if multipart is None:
+            return None
+
+        # Parse the body first so it gets cached, as Starlette does not cache form()
+        # the way it does body() and json(); see
+        # https://github.com/encode/starlette/discussions/1933
+        # Calling `.form()` without calling `.body()` first could otherwise break
+        # the user's application.
+        await self.request.body()
+
+        return await self.request.form()
+
+    def is_json(self):
+        # type: (StarletteRequestExtractor) -> bool
+        return _is_json_content_type(self.request.headers.get("content-type"))
+
+    async def json(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        if not self.is_json():
+            return None
+
+        return await self.request.json()
+
+
+def _transaction_name_from_router(scope):
+    # type: (StarletteScope) -> Optional[str]
+    router = scope.get("router")
+    if not router:
+        return None
+
+    for route in router.routes:
+        match = route.matches(scope)
+        if match[0] == Match.FULL:
+            try:
+                return route.path
+            except AttributeError:
+                # routes added via app.host() won't have a path attribute
+                return scope.get("path")
+
+    return None
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (sentry_sdk.Scope, str, Any) -> None
+    name = None
+    source = SOURCE_FOR_STYLE[transaction_style]
+
+    if transaction_style == "endpoint":
+        endpoint = request.scope.get("endpoint")
+        if endpoint:
+            name = transaction_from_function(endpoint) or None
+
+    elif transaction_style == "url":
+        name = _transaction_name_from_router(request.scope)
+
+    if name is None:
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TransactionSource.ROUTE
+
+    scope.set_transaction_name(name, source=source)
+    logger.debug(
+        "[Starlette] Set transaction name and source on scope: %s / %s", name, source
+    )
+
+
+def _get_transaction_from_middleware(app, asgi_scope, integration):
+    # type: (Any, Dict[str, Any], StarletteIntegration) -> Tuple[Optional[str], Optional[str]]
+    name = None
+    source = None
+
+    if integration.transaction_style == "endpoint":
+        name = transaction_from_function(app.__class__)
+        source = TransactionSource.COMPONENT
+    elif integration.transaction_style == "url":
+        name = _transaction_name_from_router(asgi_scope)
+        source = TransactionSource.ROUTE
+
+    return name, source
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/starlite.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/starlite.py
new file mode 100644
index 00000000..24707a18
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/starlite.py
@@ -0,0 +1,292 @@
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource
+from sentry_sdk.utils import (
+    ensure_integration_enabled,
+    event_from_exception,
+    transaction_from_function,
+)
+
+try:
+    from starlite import Request, Starlite, State  # type: ignore
+    from starlite.handlers.base import BaseRouteHandler  # type: ignore
+    from starlite.middleware import DefineMiddleware  # type: ignore
+    from starlite.plugins.base import get_plugin_for_value  # type: ignore
+    from starlite.routes.http import HTTPRoute  # type: ignore
+    from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref  # type: ignore
+    from pydantic import BaseModel  # type: ignore
+except ImportError:
+    raise DidNotEnable("Starlite is not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Optional, Union
+    from starlite.types import (  # type: ignore
+        ASGIApp,
+        Hint,
+        HTTPReceiveMessage,
+        HTTPScope,
+        Message,
+        Middleware,
+        Receive,
+        Scope as StarliteScope,
+        Send,
+        WebSocketReceiveMessage,
+    )
+    from starlite import MiddlewareProtocol
+    from sentry_sdk._types import Event
+
+
+_DEFAULT_TRANSACTION_NAME = "generic Starlite request"
+
+
+class StarliteIntegration(Integration):
+    identifier = "starlite"
+    origin = f"auto.http.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_app_init()
+        patch_middlewares()
+        patch_http_route_handle()
+
+
+class SentryStarliteASGIMiddleware(SentryAsgiMiddleware):
+    def __init__(self, app, span_origin=StarliteIntegration.origin):
+        # type: (ASGIApp, str) -> None
+        super().__init__(
+            app=app,
+            unsafe_context_data=False,
+            transaction_style="endpoint",
+            mechanism_type="asgi",
+            span_origin=span_origin,
+        )
+
+
+def patch_app_init():
+    # type: () -> None
+    """
+    Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the
+    `SentryStarliteASGIMiddleware` as the outermost middleware in the stack.
+    See:
+    - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception
+    - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/
+    """
+    old__init__ = Starlite.__init__
+
+    @ensure_integration_enabled(StarliteIntegration, old__init__)
+    def injection_wrapper(self, *args, **kwargs):
+        # type: (Starlite, *Any, **Any) -> None
+        after_exception = kwargs.pop("after_exception", [])
+        kwargs.update(
+            after_exception=[
+                exception_handler,
+                *(
+                    after_exception
+                    if isinstance(after_exception, list)
+                    else [after_exception]
+                ),
+            ]
+        )
+
+        SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3  # type: ignore
+        middleware = kwargs.get("middleware") or []
+        kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware]
+        old__init__(self, *args, **kwargs)
+
+    Starlite.__init__ = injection_wrapper
+
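+# Illustrative usage sketch (comment only): with the integration enabled, the
+# patched `Starlite.__init__` above injects the Sentry middleware and the
+# `after_exception` hook automatically. Handler and DSN values are placeholders.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.starlite import StarliteIntegration
+#     from starlite import Starlite, get
+#
+#     sentry_sdk.init(
+#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
+#         traces_sample_rate=1.0,
+#         integrations=[StarliteIntegration()],
+#     )
+#
+#     @get("/")
+#     def index() -> dict:
+#         return {"hello": "world"}
+#
+#     app = Starlite(route_handlers=[index])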
+
+def patch_middlewares():
+    # type: () -> None
+    old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware
+
+    @ensure_integration_enabled(StarliteIntegration, old_resolve_middleware_stack)
+    def resolve_middleware_wrapper(self):
+        # type: (BaseRouteHandler) -> list[Middleware]
+        return [
+            enable_span_for_middleware(middleware)
+            for middleware in old_resolve_middleware_stack(self)
+        ]
+
+    BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper
+
+
+def enable_span_for_middleware(middleware):
+    # type: (Middleware) -> Middleware
+    if (
+        not hasattr(middleware, "__call__")  # noqa: B004
+        or middleware is SentryStarliteASGIMiddleware
+    ):
+        return middleware
+
+    if isinstance(middleware, DefineMiddleware):
+        old_call = middleware.middleware.__call__  # type: ASGIApp
+    else:
+        old_call = middleware.__call__
+
+    async def _create_span_call(self, scope, receive, send):
+        # type: (MiddlewareProtocol, StarliteScope, Receive, Send) -> None
+        if sentry_sdk.get_client().get_integration(StarliteIntegration) is None:
+            return await old_call(self, scope, receive, send)
+
+        middleware_name = self.__class__.__name__
+        with sentry_sdk.start_span(
+            op=OP.MIDDLEWARE_STARLITE,
+            name=middleware_name,
+            origin=StarliteIntegration.origin,
+        ) as middleware_span:
+            middleware_span.set_tag("starlite.middleware_name", middleware_name)
+
+            # Creating spans for the "receive" callback
+            async def _sentry_receive(*args, **kwargs):
+                # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage]
+                if sentry_sdk.get_client().get_integration(StarliteIntegration) is None:
+                    return await receive(*args, **kwargs)
+                with sentry_sdk.start_span(
+                    op=OP.MIDDLEWARE_STARLITE_RECEIVE,
+                    name=getattr(receive, "__qualname__", str(receive)),
+                    origin=StarliteIntegration.origin,
+                ) as span:
+                    span.set_tag("starlite.middleware_name", middleware_name)
+                    return await receive(*args, **kwargs)
+
+            receive_name = getattr(receive, "__name__", str(receive))
+            receive_patched = receive_name == "_sentry_receive"
+            new_receive = _sentry_receive if not receive_patched else receive
+
+            # Creating spans for the "send" callback
+            async def _sentry_send(message):
+                # type: (Message) -> None
+                if sentry_sdk.get_client().get_integration(StarliteIntegration) is None:
+                    return await send(message)
+                with sentry_sdk.start_span(
+                    op=OP.MIDDLEWARE_STARLITE_SEND,
+                    name=getattr(send, "__qualname__", str(send)),
+                    origin=StarliteIntegration.origin,
+                ) as span:
+                    span.set_tag("starlite.middleware_name", middleware_name)
+                    return await send(message)
+
+            send_name = getattr(send, "__name__", str(send))
+            send_patched = send_name == "_sentry_send"
+            new_send = _sentry_send if not send_patched else send
+
+            return await old_call(self, scope, new_receive, new_send)
+
+    not_yet_patched = old_call.__name__ not in ["_create_span_call"]
+
+    if not_yet_patched:
+        if isinstance(middleware, DefineMiddleware):
+            middleware.middleware.__call__ = _create_span_call
+        else:
+            middleware.__call__ = _create_span_call
+
+    return middleware
+
+
+def patch_http_route_handle():
+    # type: () -> None
+    old_handle = HTTPRoute.handle
+
+    async def handle_wrapper(self, scope, receive, send):
+        # type: (HTTPRoute, HTTPScope, Receive, Send) -> None
+        if sentry_sdk.get_client().get_integration(StarliteIntegration) is None:
+            return await old_handle(self, scope, receive, send)
+
+        sentry_scope = sentry_sdk.get_isolation_scope()
+        request = scope["app"].request_class(
+            scope=scope, receive=receive, send=send
+        )  # type: Request[Any, Any]
+        extracted_request_data = ConnectionDataExtractor(
+            parse_body=True, parse_query=True
+        )(request)
+        body = extracted_request_data.pop("body")
+
+        request_data = await body
+
+        def event_processor(event, _):
+            # type: (Event, Hint) -> Event
+            route_handler = scope.get("route_handler")
+
+            request_info = event.get("request", {})
+            request_info["content_length"] = len(scope.get("_body", b""))
+            if should_send_default_pii():
+                request_info["cookies"] = extracted_request_data["cookies"]
+            if request_data is not None:
+                request_info["data"] = request_data
+
+            func = None
+            if route_handler.name is not None:
+                tx_name = route_handler.name
+            elif isinstance(route_handler.fn, Ref):
+                func = route_handler.fn.value
+            else:
+                func = route_handler.fn
+            if func is not None:
+                tx_name = transaction_from_function(func)
+
+            tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
+
+            if not tx_name:
+                tx_name = _DEFAULT_TRANSACTION_NAME
+                tx_info = {"source": TransactionSource.ROUTE}
+
+            event.update(
+                {
+                    "request": request_info,
+                    "transaction": tx_name,
+                    "transaction_info": tx_info,
+                }
+            )
+            return event
+
+        sentry_scope._name = StarliteIntegration.identifier
+        sentry_scope.add_event_processor(event_processor)
+
+        return await old_handle(self, scope, receive, send)
+
+    HTTPRoute.handle = handle_wrapper
+
+
+def retrieve_user_from_scope(scope):
+    # type: (StarliteScope) -> Optional[dict[str, Any]]
+    scope_user = scope.get("user")
+    if not scope_user:
+        return None
+    if isinstance(scope_user, dict):
+        return scope_user
+    if isinstance(scope_user, BaseModel):
+        return scope_user.dict()
+    if hasattr(scope_user, "asdict"):  # dataclasses
+        return scope_user.asdict()
+
+    plugin = get_plugin_for_value(scope_user)
+    if plugin and not is_async_callable(plugin.to_dict):
+        return plugin.to_dict(scope_user)
+
+    return None
+
+
+@ensure_integration_enabled(StarliteIntegration)
+def exception_handler(exc, scope, _):
+    # type: (Exception, StarliteScope, State) -> None
+    user_info = None  # type: Optional[dict[str, Any]]
+    if should_send_default_pii():
+        user_info = retrieve_user_from_scope(scope)
+    if user_info and isinstance(user_info, dict):
+        sentry_scope = sentry_sdk.get_isolation_scope()
+        sentry_scope.set_user(user_info)
+
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": StarliteIntegration.identifier, "handled": False},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/statsig.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/statsig.py
new file mode 100644
index 00000000..1d84eb8a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/statsig.py
@@ -0,0 +1,37 @@
+from functools import wraps
+from typing import Any, TYPE_CHECKING
+
+from sentry_sdk.feature_flags import add_feature_flag
+from sentry_sdk.integrations import Integration, DidNotEnable, _check_minimum_version
+from sentry_sdk.utils import parse_version
+
+try:
+    from statsig import statsig as statsig_module
+    from statsig.version import __version__ as STATSIG_VERSION
+except ImportError:
+    raise DidNotEnable("statsig is not installed")
+
+if TYPE_CHECKING:
+    from statsig.statsig_user import StatsigUser
+
+
+class StatsigIntegration(Integration):
+    identifier = "statsig"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = parse_version(STATSIG_VERSION)
+        _check_minimum_version(StatsigIntegration, version, "statsig")
+
+        # Wrap and patch evaluation method(s) in the statsig module
+        old_check_gate = statsig_module.check_gate
+
+        @wraps(old_check_gate)
+        def sentry_check_gate(user, gate, *args, **kwargs):
+            # type: (StatsigUser, str, *Any, **Any) -> Any
+            enabled = old_check_gate(user, gate, *args, **kwargs)
+            add_feature_flag(gate, enabled)
+            return enabled
+
+        statsig_module.check_gate = sentry_check_gate
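+
+# Illustrative usage sketch (comment only): with the integration enabled, each
+# `check_gate` call is mirrored into Sentry's feature-flag context via
+# `add_feature_flag`. The DSN, secret key, user id, and gate name below are
+# placeholders.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.statsig import StatsigIntegration
+#     from statsig import statsig
+#     from statsig.statsig_user import StatsigUser
+#
+#     sentry_sdk.init(
+#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
+#         integrations=[StatsigIntegration()],
+#     )
+#     statsig.initialize("server-secret-key")
+#
+#     enabled = statsig.check_gate(StatsigUser(user_id="some-user"), "my_gate")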
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/stdlib.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/stdlib.py
new file mode 100644
index 00000000..d388c5bc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/stdlib.py
@@ -0,0 +1,265 @@
+import os
+import subprocess
+import sys
+import platform
+from http.client import HTTPConnection
+
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.tracing_utils import EnvironHeaders, should_propagate_trace
+from sentry_sdk.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    is_sentry_url,
+    logger,
+    safe_repr,
+    parse_url,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+    from typing import Optional
+    from typing import List
+
+    from sentry_sdk._types import Event, Hint
+
+
+_RUNTIME_CONTEXT = {
+    "name": platform.python_implementation(),
+    "version": "%s.%s.%s" % (sys.version_info[:3]),
+    "build": sys.version,
+}  # type: dict[str, object]
+
+
+class StdlibIntegration(Integration):
+    identifier = "stdlib"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _install_httplib()
+        _install_subprocess()
+
+        @add_global_event_processor
+        def add_python_runtime_context(event, hint):
+            # type: (Event, Hint) -> Optional[Event]
+            if sentry_sdk.get_client().get_integration(StdlibIntegration) is not None:
+                contexts = event.setdefault("contexts", {})
+                if isinstance(contexts, dict) and "runtime" not in contexts:
+                    contexts["runtime"] = _RUNTIME_CONTEXT
+
+            return event
+
+
+def _install_httplib():
+    # type: () -> None
+    real_putrequest = HTTPConnection.putrequest
+    real_getresponse = HTTPConnection.getresponse
+
+    def putrequest(self, method, url, *args, **kwargs):
+        # type: (HTTPConnection, str, str, *Any, **Any) -> Any
+        host = self.host
+        port = self.port
+        default_port = self.default_port
+
+        client = sentry_sdk.get_client()
+        if client.get_integration(StdlibIntegration) is None or is_sentry_url(
+            client, host
+        ):
+            return real_putrequest(self, method, url, *args, **kwargs)
+
+        real_url = url
+        if real_url is None or not real_url.startswith(("http://", "https://")):
+            real_url = "%s://%s%s%s" % (
+                default_port == 443 and "https" or "http",
+                host,
+                port != default_port and ":%s" % port or "",
+                url,
+            )
+
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(real_url, sanitize=False)
+
+        span = sentry_sdk.start_span(
+            op=OP.HTTP_CLIENT,
+            name="%s %s"
+            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+            origin="auto.http.stdlib.httplib",
+        )
+        span.set_data(SPANDATA.HTTP_METHOD, method)
+        if parsed_url is not None:
+            span.set_data("url", parsed_url.url)
+            span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+            span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+        rv = real_putrequest(self, method, url, *args, **kwargs)
+
+        if should_propagate_trace(client, real_url):
+            for (
+                key,
+                value,
+            ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(
+                span=span
+            ):
+                logger.debug(
+                    "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
+                        key=key, value=value, real_url=real_url
+                    )
+                )
+                self.putheader(key, value)
+
+        self._sentrysdk_span = span  # type: ignore[attr-defined]
+
+        return rv
+
+    def getresponse(self, *args, **kwargs):
+        # type: (HTTPConnection, *Any, **Any) -> Any
+        span = getattr(self, "_sentrysdk_span", None)
+
+        if span is None:
+            return real_getresponse(self, *args, **kwargs)
+
+        try:
+            rv = real_getresponse(self, *args, **kwargs)
+
+            span.set_http_status(int(rv.status))
+            span.set_data("reason", rv.reason)
+        finally:
+            span.finish()
+
+        return rv
+
+    HTTPConnection.putrequest = putrequest  # type: ignore[method-assign]
+    HTTPConnection.getresponse = getresponse  # type: ignore[method-assign]
+
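+# Illustrative sketch (comment only): with the SDK initialized, any request
+# made through the stdlib HTTP stack goes through the patched `putrequest`/
+# `getresponse` above, creating an `http.client` span and (for allowed
+# targets) attaching the trace propagation headers. The URL is a placeholder.
+#
+#     from urllib.request import urlopen
+#
+#     urlopen("https://example.com/")  # recorded as an http.client span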
+
+def _init_argument(args, kwargs, name, position, setdefault_callback=None):
+    # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any
+    """
+    Given (*args, **kwargs) of a function call, retrieve (and optionally set a
+    default for) an argument by either name or position.
+
+    This is useful for wrapping functions with complex type signatures and
+    extracting a few arguments without needing to redefine that function's
+    entire type signature.
+    """
+
+    if name in kwargs:
+        rv = kwargs[name]
+        if setdefault_callback is not None:
+            rv = setdefault_callback(rv)
+        if rv is not None:
+            kwargs[name] = rv
+    elif position < len(args):
+        rv = args[position]
+        if setdefault_callback is not None:
+            rv = setdefault_callback(rv)
+        if rv is not None:
+            args[position] = rv
+    else:
+        rv = setdefault_callback and setdefault_callback(None)
+        if rv is not None:
+            kwargs[name] = rv
+
+    return rv
+
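+# Small illustrative example (comment only) of how `_init_argument` behaves,
+# using hypothetical call arguments:
+#
+#     a, kw = ["ls"], {}
+#     _init_argument(a, kw, "cwd", 9)                      # -> None (argument absent)
+#     _init_argument(a, kw, "env", 10, lambda x: x or {})  # -> {} and sets kw["env"] = {}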
+
+def _install_subprocess():
+    # type: () -> None
+    old_popen_init = subprocess.Popen.__init__
+
+    @ensure_integration_enabled(StdlibIntegration, old_popen_init)
+    def sentry_patched_popen_init(self, *a, **kw):
+        # type: (subprocess.Popen[Any], *Any, **Any) -> None
+        # Convert from tuple to list to be able to set values.
+        a = list(a)
+
+        args = _init_argument(a, kw, "args", 0) or []
+        cwd = _init_argument(a, kw, "cwd", 9)
+
+        # If args is not a list or tuple (but e.g. some iterator instead),
+        # do not use it at all. There are too many things that can go wrong
+        # when trying to collect an iterator into a list and writing that list
+        # back into `a`.
+        #
+        # Also, invocations where `args` is not a sequence are not actually
+        # legal; they just happen to work under CPython.
+        description = None
+
+        if isinstance(args, (list, tuple)) and len(args) < 100:
+            with capture_internal_exceptions():
+                description = " ".join(map(str, args))
+
+        if description is None:
+            description = safe_repr(args)
+
+        env = None
+
+        with sentry_sdk.start_span(
+            op=OP.SUBPROCESS,
+            name=description,
+            origin="auto.subprocess.stdlib.subprocess",
+        ) as span:
+            for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers(
+                span=span
+            ):
+                if env is None:
+                    env = _init_argument(
+                        a,
+                        kw,
+                        "env",
+                        10,
+                        lambda x: dict(x if x is not None else os.environ),
+                    )
+                env["SUBPROCESS_" + k.upper().replace("-", "_")] = v
+
+            if cwd:
+                span.set_data("subprocess.cwd", cwd)
+
+            rv = old_popen_init(self, *a, **kw)
+
+            span.set_tag("subprocess.pid", self.pid)
+            return rv
+
+    subprocess.Popen.__init__ = sentry_patched_popen_init  # type: ignore
+
+    old_popen_wait = subprocess.Popen.wait
+
+    @ensure_integration_enabled(StdlibIntegration, old_popen_wait)
+    def sentry_patched_popen_wait(self, *a, **kw):
+        # type: (subprocess.Popen[Any], *Any, **Any) -> Any
+        with sentry_sdk.start_span(
+            op=OP.SUBPROCESS_WAIT,
+            origin="auto.subprocess.stdlib.subprocess",
+        ) as span:
+            span.set_tag("subprocess.pid", self.pid)
+            return old_popen_wait(self, *a, **kw)
+
+    subprocess.Popen.wait = sentry_patched_popen_wait  # type: ignore
+
+    old_popen_communicate = subprocess.Popen.communicate
+
+    @ensure_integration_enabled(StdlibIntegration, old_popen_communicate)
+    def sentry_patched_popen_communicate(self, *a, **kw):
+        # type: (subprocess.Popen[Any], *Any, **Any) -> Any
+        with sentry_sdk.start_span(
+            op=OP.SUBPROCESS_COMMUNICATE,
+            origin="auto.subprocess.stdlib.subprocess",
+        ) as span:
+            span.set_tag("subprocess.pid", self.pid)
+            return old_popen_communicate(self, *a, **kw)
+
+    subprocess.Popen.communicate = sentry_patched_popen_communicate  # type: ignore
+
+
+def get_subprocess_traceparent_headers():
+    # type: () -> EnvironHeaders
+    return EnvironHeaders(os.environ, prefix="SUBPROCESS_")
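+
+# Illustrative sketch (comment only): a child process spawned while a span was
+# active can read the propagated values written into its environment by the
+# `Popen.__init__` patch above, for example:
+#
+#     headers = get_subprocess_traceparent_headers()
+#     sentry_trace = headers.get("sentry-trace")  # incoming trace header, if any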
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/strawberry.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/strawberry.py
new file mode 100644
index 00000000..ae7d2730
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/strawberry.py
@@ -0,0 +1,393 @@
+import functools
+import hashlib
+from inspect import isawaitable
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import TransactionSource
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    ensure_integration_enabled,
+    event_from_exception,
+    logger,
+    package_version,
+    _get_installed_modules,
+)
+
+try:
+    from functools import cached_property
+except ImportError:
+    # The strawberry integration requires Python 3.8+. functools.cached_property
+    # was added in 3.8, so this check is technically not needed, but since this
+    # is an auto-enabling integration, this import may still be executed on
+    # older Python versions, so it has to be handled gracefully.
+    raise DidNotEnable("strawberry-graphql integration requires Python 3.8 or newer")
+
+try:
+    from strawberry import Schema
+    from strawberry.extensions import SchemaExtension
+    from strawberry.extensions.tracing.utils import (
+        should_skip_tracing as strawberry_should_skip_tracing,
+    )
+    from strawberry.http import async_base_view, sync_base_view
+except ImportError:
+    raise DidNotEnable("strawberry-graphql is not installed")
+
+try:
+    from strawberry.extensions.tracing import (
+        SentryTracingExtension as StrawberrySentryAsyncExtension,
+        SentryTracingExtensionSync as StrawberrySentrySyncExtension,
+    )
+except ImportError:
+    StrawberrySentryAsyncExtension = None
+    StrawberrySentrySyncExtension = None
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Generator, List, Optional
+    from graphql import GraphQLError, GraphQLResolveInfo
+    from strawberry.http import GraphQLHTTPResponse
+    from strawberry.types import ExecutionContext
+    from sentry_sdk._types import Event, EventProcessor
+
+
+ignore_logger("strawberry.execution")
+
+
+class StrawberryIntegration(Integration):
+    identifier = "strawberry"
+    origin = f"auto.graphql.{identifier}"
+
+    def __init__(self, async_execution=None):
+        # type: (Optional[bool]) -> None
+        if async_execution not in (None, False, True):
+            raise ValueError(
+                'Invalid value for async_execution: "{}" (must be bool)'.format(
+                    async_execution
+                )
+            )
+        self.async_execution = async_execution
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = package_version("strawberry-graphql")
+        _check_minimum_version(StrawberryIntegration, version, "strawberry-graphql")
+
+        _patch_schema_init()
+        _patch_views()
+
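+# Illustrative configuration sketch (comment only): `async_execution` can be
+# set explicitly when the automatic guess made in `_patch_schema_init` below
+# is not wanted; the DSN is a placeholder.
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.strawberry import StrawberryIntegration
+#
+#     sentry_sdk.init(
+#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
+#         traces_sample_rate=1.0,
+#         integrations=[StrawberryIntegration(async_execution=True)],
+#     )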
+
+def _patch_schema_init():
+    # type: () -> None
+    old_schema_init = Schema.__init__
+
+    @functools.wraps(old_schema_init)
+    def _sentry_patched_schema_init(self, *args, **kwargs):
+        # type: (Schema, Any, Any) -> None
+        integration = sentry_sdk.get_client().get_integration(StrawberryIntegration)
+        if integration is None:
+            return old_schema_init(self, *args, **kwargs)
+
+        extensions = kwargs.get("extensions") or []
+
+        if integration.async_execution is not None:
+            should_use_async_extension = integration.async_execution
+        else:
+            # try to figure it out ourselves
+            should_use_async_extension = _guess_if_using_async(extensions)
+
+            logger.info(
+                "Assuming strawberry is running %s. If not, initialize it as StrawberryIntegration(async_execution=%s).",
+                "async" if should_use_async_extension else "sync",
+                "False" if should_use_async_extension else "True",
+            )
+
+        # remove the built in strawberry sentry extension, if present
+        extensions = [
+            extension
+            for extension in extensions
+            if extension
+            not in (StrawberrySentryAsyncExtension, StrawberrySentrySyncExtension)
+        ]
+
+        # add our extension
+        extensions.append(
+            SentryAsyncExtension if should_use_async_extension else SentrySyncExtension
+        )
+
+        kwargs["extensions"] = extensions
+
+        return old_schema_init(self, *args, **kwargs)
+
+    Schema.__init__ = _sentry_patched_schema_init  # type: ignore[method-assign]
+
+
+class SentryAsyncExtension(SchemaExtension):
+    def __init__(
+        self,
+        *,
+        execution_context=None,
+    ):
+        # type: (Any, Optional[ExecutionContext]) -> None
+        if execution_context:
+            self.execution_context = execution_context
+
+    @cached_property
+    def _resource_name(self):
+        # type: () -> str
+        query_hash = self.hash_query(self.execution_context.query)  # type: ignore
+
+        if self.execution_context.operation_name:
+            return "{}:{}".format(self.execution_context.operation_name, query_hash)
+
+        return query_hash
+
+    def hash_query(self, query):
+        # type: (str) -> str
+        return hashlib.md5(query.encode("utf-8")).hexdigest()
+
+    def on_operation(self):
+        # type: () -> Generator[None, None, None]
+        self._operation_name = self.execution_context.operation_name
+
+        operation_type = "query"
+        op = OP.GRAPHQL_QUERY
+
+        if self.execution_context.query is None:
+            self.execution_context.query = ""
+
+        if self.execution_context.query.strip().startswith("mutation"):
+            operation_type = "mutation"
+            op = OP.GRAPHQL_MUTATION
+        elif self.execution_context.query.strip().startswith("subscription"):
+            operation_type = "subscription"
+            op = OP.GRAPHQL_SUBSCRIPTION
+
+        description = operation_type
+        if self._operation_name:
+            description += " {}".format(self._operation_name)
+
+        sentry_sdk.add_breadcrumb(
+            category="graphql.operation",
+            data={
+                "operation_name": self._operation_name,
+                "operation_type": operation_type,
+            },
+        )
+
+        scope = sentry_sdk.get_isolation_scope()
+        event_processor = _make_request_event_processor(self.execution_context)
+        scope.add_event_processor(event_processor)
+
+        span = sentry_sdk.get_current_span()
+        if span:
+            self.graphql_span = span.start_child(
+                op=op,
+                name=description,
+                origin=StrawberryIntegration.origin,
+            )
+        else:
+            self.graphql_span = sentry_sdk.start_span(
+                op=op,
+                name=description,
+                origin=StrawberryIntegration.origin,
+            )
+
+        self.graphql_span.set_data("graphql.operation.type", operation_type)
+        self.graphql_span.set_data("graphql.operation.name", self._operation_name)
+        self.graphql_span.set_data("graphql.document", self.execution_context.query)
+        self.graphql_span.set_data("graphql.resource_name", self._resource_name)
+
+        yield
+
+        transaction = self.graphql_span.containing_transaction
+        if transaction and self.execution_context.operation_name:
+            transaction.name = self.execution_context.operation_name
+            transaction.source = TransactionSource.COMPONENT
+            transaction.op = op
+
+        self.graphql_span.finish()
+
+    def on_validate(self):
+        # type: () -> Generator[None, None, None]
+        self.validation_span = self.graphql_span.start_child(
+            op=OP.GRAPHQL_VALIDATE,
+            name="validation",
+            origin=StrawberryIntegration.origin,
+        )
+
+        yield
+
+        self.validation_span.finish()
+
+    def on_parse(self):
+        # type: () -> Generator[None, None, None]
+        self.parsing_span = self.graphql_span.start_child(
+            op=OP.GRAPHQL_PARSE,
+            name="parsing",
+            origin=StrawberryIntegration.origin,
+        )
+
+        yield
+
+        self.parsing_span.finish()
+
+    def should_skip_tracing(self, _next, info):
+        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool
+        return strawberry_should_skip_tracing(_next, info)
+
+    async def _resolve(self, _next, root, info, *args, **kwargs):
+        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
+        result = _next(root, info, *args, **kwargs)
+
+        if isawaitable(result):
+            result = await result
+
+        return result
+
+    async def resolve(self, _next, root, info, *args, **kwargs):
+        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
+        if self.should_skip_tracing(_next, info):
+            return await self._resolve(_next, root, info, *args, **kwargs)
+
+        field_path = "{}.{}".format(info.parent_type, info.field_name)
+
+        with self.graphql_span.start_child(
+            op=OP.GRAPHQL_RESOLVE,
+            name="resolving {}".format(field_path),
+            origin=StrawberryIntegration.origin,
+        ) as span:
+            span.set_data("graphql.field_name", info.field_name)
+            span.set_data("graphql.parent_type", info.parent_type.name)
+            span.set_data("graphql.field_path", field_path)
+            span.set_data("graphql.path", ".".join(map(str, info.path.as_list())))
+
+            return await self._resolve(_next, root, info, *args, **kwargs)
+
+
+class SentrySyncExtension(SentryAsyncExtension):
+    def resolve(self, _next, root, info, *args, **kwargs):
+        # type: (Callable[[Any, Any, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
+        if self.should_skip_tracing(_next, info):
+            return _next(root, info, *args, **kwargs)
+
+        field_path = "{}.{}".format(info.parent_type, info.field_name)
+
+        with self.graphql_span.start_child(
+            op=OP.GRAPHQL_RESOLVE,
+            name="resolving {}".format(field_path),
+            origin=StrawberryIntegration.origin,
+        ) as span:
+            span.set_data("graphql.field_name", info.field_name)
+            span.set_data("graphql.parent_type", info.parent_type.name)
+            span.set_data("graphql.field_path", field_path)
+            span.set_data("graphql.path", ".".join(map(str, info.path.as_list())))
+
+            return _next(root, info, *args, **kwargs)
+
+
+def _patch_views():
+    # type: () -> None
+    old_async_view_handle_errors = async_base_view.AsyncBaseHTTPView._handle_errors
+    old_sync_view_handle_errors = sync_base_view.SyncBaseHTTPView._handle_errors
+
+    def _sentry_patched_async_view_handle_errors(self, errors, response_data):
+        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
+        old_async_view_handle_errors(self, errors, response_data)
+        _sentry_patched_handle_errors(self, errors, response_data)
+
+    def _sentry_patched_sync_view_handle_errors(self, errors, response_data):
+        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
+        old_sync_view_handle_errors(self, errors, response_data)
+        _sentry_patched_handle_errors(self, errors, response_data)
+
+    @ensure_integration_enabled(StrawberryIntegration)
+    def _sentry_patched_handle_errors(self, errors, response_data):
+        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
+        if not errors:
+            return
+
+        scope = sentry_sdk.get_isolation_scope()
+        event_processor = _make_response_event_processor(response_data)
+        scope.add_event_processor(event_processor)
+
+        with capture_internal_exceptions():
+            for error in errors:
+                event, hint = event_from_exception(
+                    error,
+                    client_options=sentry_sdk.get_client().options,
+                    mechanism={
+                        "type": StrawberryIntegration.identifier,
+                        "handled": False,
+                    },
+                )
+                sentry_sdk.capture_event(event, hint=hint)
+
+    async_base_view.AsyncBaseHTTPView._handle_errors = (  # type: ignore[method-assign]
+        _sentry_patched_async_view_handle_errors
+    )
+    sync_base_view.SyncBaseHTTPView._handle_errors = (  # type: ignore[method-assign]
+        _sentry_patched_sync_view_handle_errors
+    )
+
+
+def _make_request_event_processor(execution_context):
+    # type: (ExecutionContext) -> EventProcessor
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        with capture_internal_exceptions():
+            if should_send_default_pii():
+                request_data = event.setdefault("request", {})
+                request_data["api_target"] = "graphql"
+
+                if not request_data.get("data"):
+                    data = {"query": execution_context.query}  # type: dict[str, Any]
+                    if execution_context.variables:
+                        data["variables"] = execution_context.variables
+                    if execution_context.operation_name:
+                        data["operationName"] = execution_context.operation_name
+
+                    request_data["data"] = data
+
+            else:
+                try:
+                    del event["request"]["data"]
+                except (KeyError, TypeError):
+                    pass
+
+        return event
+
+    return inner
+
+
+def _make_response_event_processor(response_data):
+    # type: (GraphQLHTTPResponse) -> EventProcessor
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        with capture_internal_exceptions():
+            if should_send_default_pii():
+                contexts = event.setdefault("contexts", {})
+                contexts["response"] = {"data": response_data}
+
+        return event
+
+    return inner
+
+
+def _guess_if_using_async(extensions):
+    # type: (List[SchemaExtension]) -> bool
+    if StrawberrySentryAsyncExtension in extensions:
+        return True
+    elif StrawberrySentrySyncExtension in extensions:
+        return False
+
+    return bool(
+        {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules())
+    )
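A minimal, illustrative enablement sketch (not part of the vendored module); it assumes
strawberry-graphql is installed and passes async_execution explicitly, as the log message
in _patch_schema_init suggests:

import sentry_sdk
from sentry_sdk.integrations.strawberry import StrawberryIntegration

sentry_sdk.init(
    # dsn="...",  # a real project DSN would normally go here
    # Setting async_execution explicitly skips the _guess_if_using_async heuristic.
    integrations=[StrawberryIntegration(async_execution=True)],
)
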
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sys_exit.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sys_exit.py
new file mode 100644
index 00000000..2341e113
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/sys_exit.py
@@ -0,0 +1,70 @@
+import functools
+import sys
+
+import sentry_sdk
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import NoReturn, Union
+
+
+class SysExitIntegration(Integration):
+    """Captures sys.exit calls and sends them as events to Sentry.
+
+    By default, SystemExit exceptions are not captured by the SDK. Enabling this integration will capture SystemExit
+    exceptions generated by sys.exit calls and send them to Sentry.
+
+    In its default configuration, this integration only captures sys.exit calls whose exit code is non-zero and
+    not None (unsuccessful exits). Pass `capture_successful_exits=True` to capture successful exits as well.
+    Note that the integration does not capture SystemExit exceptions raised outside of a call to sys.exit.
+    """
+
+    identifier = "sys_exit"
+
+    def __init__(self, *, capture_successful_exits=False):
+        # type: (bool) -> None
+        self._capture_successful_exits = capture_successful_exits
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        SysExitIntegration._patch_sys_exit()
+
+    @staticmethod
+    def _patch_sys_exit():
+        # type: () -> None
+        old_exit = sys.exit  # type: Callable[[Union[str, int, None]], NoReturn]
+
+        @functools.wraps(old_exit)
+        def sentry_patched_exit(__status=0):
+            # type: (Union[str, int, None]) -> NoReturn
+            # The integration may be disabled; in that case defer to the original sys.exit.
+            integration = sentry_sdk.get_client().get_integration(SysExitIntegration)
+            if integration is None:
+                old_exit(__status)
+
+            try:
+                old_exit(__status)
+            except SystemExit as e:
+                with capture_internal_exceptions():
+                    if integration._capture_successful_exits or __status not in (
+                        0,
+                        None,
+                    ):
+                        _capture_exception(e)
+                raise e
+
+        sys.exit = sentry_patched_exit
+
+
+def _capture_exception(exc):
+    # type: (SystemExit) -> None
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": SysExitIntegration.identifier, "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
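An illustrative usage sketch (not part of the vendored module), showing the option
documented in the class docstring above:

import sys

import sentry_sdk
from sentry_sdk.integrations.sys_exit import SysExitIntegration

sentry_sdk.init(
    # dsn="...",  # a real project DSN would normally go here
    integrations=[SysExitIntegration(capture_successful_exits=True)],
)

# With capture_successful_exits=True even a successful exit is reported before the
# SystemExit raised by the patched sys.exit propagates.
sys.exit(0)
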
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/threading.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/threading.py
new file mode 100644
index 00000000..5de736e2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/threading.py
@@ -0,0 +1,121 @@
+import sys
+from functools import wraps
+from threading import Thread, current_thread
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import use_isolation_scope, use_scope
+from sentry_sdk.utils import (
+    event_from_exception,
+    capture_internal_exceptions,
+    logger,
+    reraise,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import TypeVar
+    from typing import Callable
+    from typing import Optional
+
+    from sentry_sdk._types import ExcInfo
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+
+class ThreadingIntegration(Integration):
+    identifier = "threading"
+
+    def __init__(self, propagate_hub=None, propagate_scope=True):
+        # type: (Optional[bool], bool) -> None
+        if propagate_hub is not None:
+            logger.warning(
+                "Deprecated: propagate_hub is deprecated. This will be removed in the future."
+            )
+
+        # Note: propagate_hub never had any effect on the propagation of scope data;
+        # scope data was always propagated regardless of its value, which is why the
+        # default for propagate_scope is True.
+
+        self.propagate_scope = propagate_scope
+
+        if propagate_hub is not None:
+            self.propagate_scope = propagate_hub
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        old_start = Thread.start
+
+        @wraps(old_start)
+        def sentry_start(self, *a, **kw):
+            # type: (Thread, *Any, **Any) -> Any
+            integration = sentry_sdk.get_client().get_integration(ThreadingIntegration)
+            if integration is None:
+                return old_start(self, *a, **kw)
+
+            if integration.propagate_scope:
+                isolation_scope = sentry_sdk.get_isolation_scope()
+                current_scope = sentry_sdk.get_current_scope()
+            else:
+                isolation_scope = None
+                current_scope = None
+
+            # Patching instance methods in `start()` creates a reference cycle if
+            # done in a naive way. See
+            # https://github.com/getsentry/sentry-python/pull/434
+            #
+            # Here, the wrapped run() looks up the current thread via threading.current_thread()
+            # at call time instead of holding a reference to the instance, which avoids the cycle.
+            with capture_internal_exceptions():
+                new_run = _wrap_run(
+                    isolation_scope,
+                    current_scope,
+                    getattr(self.run, "__func__", self.run),
+                )
+                self.run = new_run  # type: ignore
+
+            return old_start(self, *a, **kw)
+
+        Thread.start = sentry_start  # type: ignore
+
+
+def _wrap_run(isolation_scope_to_use, current_scope_to_use, old_run_func):
+    # type: (Optional[sentry_sdk.Scope], Optional[sentry_sdk.Scope], F) -> F
+    @wraps(old_run_func)
+    def run(*a, **kw):
+        # type: (*Any, **Any) -> Any
+        def _run_old_run_func():
+            # type: () -> Any
+            try:
+                self = current_thread()
+                return old_run_func(self, *a, **kw)
+            except Exception:
+                reraise(*_capture_exception())
+
+        if isolation_scope_to_use is not None and current_scope_to_use is not None:
+            with use_isolation_scope(isolation_scope_to_use):
+                with use_scope(current_scope_to_use):
+                    return _run_old_run_func()
+        else:
+            return _run_old_run_func()
+
+    return run  # type: ignore
+
+
+def _capture_exception():
+    # type: () -> ExcInfo
+    exc_info = sys.exc_info()
+
+    client = sentry_sdk.get_client()
+    if client.get_integration(ThreadingIntegration) is not None:
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "threading", "handled": False},
+        )
+        sentry_sdk.capture_event(event, hint=hint)
+
+    return exc_info
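An illustrative usage sketch (not part of the vendored module); with propagate_scope=True
the worker thread runs with the scopes captured at Thread.start() time:

import threading

import sentry_sdk
from sentry_sdk.integrations.threading import ThreadingIntegration

sentry_sdk.init(
    # dsn="...",  # a real project DSN would normally go here
    integrations=[ThreadingIntegration(propagate_scope=True)],
)

def work():
    # Breadcrumbs land on the propagated scope; uncaught exceptions are captured
    # by the wrapped run() above and then re-raised.
    sentry_sdk.add_breadcrumb(message="inside worker thread")

t = threading.Thread(target=work)
t.start()
t.join()
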
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/tornado.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/tornado.py
new file mode 100644
index 00000000..3cd08752
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/tornado.py
@@ -0,0 +1,220 @@
+import weakref
+import contextlib
+from inspect import iscoroutinefunction
+
+import sentry_sdk
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.tracing import TransactionSource
+from sentry_sdk.utils import (
+    HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
+    ensure_integration_enabled,
+    event_from_exception,
+    capture_internal_exceptions,
+    transaction_from_function,
+)
+from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import (
+    RequestExtractor,
+    _filter_headers,
+    _is_json_content_type,
+)
+from sentry_sdk.integrations.logging import ignore_logger
+
+try:
+    from tornado import version_info as TORNADO_VERSION
+    from tornado.web import RequestHandler, HTTPError
+    from tornado.gen import coroutine
+except ImportError:
+    raise DidNotEnable("Tornado not installed")
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Optional
+    from typing import Dict
+    from typing import Callable
+    from typing import Generator
+
+    from sentry_sdk._types import Event, EventProcessor
+
+
+class TornadoIntegration(Integration):
+    identifier = "tornado"
+    origin = f"auto.http.{identifier}"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _check_minimum_version(TornadoIntegration, TORNADO_VERSION)
+
+        if not HAS_REAL_CONTEXTVARS:
+            # Tornado is async. We better have contextvars or we're going to leak
+            # state between requests.
+            raise DidNotEnable(
+                "The tornado integration for Sentry requires Python 3.7+ or the aiocontextvars package"
+                + CONTEXTVARS_ERROR_MESSAGE
+            )
+
+        ignore_logger("tornado.access")
+
+        old_execute = RequestHandler._execute
+
+        awaitable = iscoroutinefunction(old_execute)
+
+        if awaitable:
+            # Starting with Tornado 6, RequestHandler._execute is a standard Python coroutine
+            # (async/await), so our replacement must be a coroutine function too.
+            async def sentry_execute_request_handler(self, *args, **kwargs):
+                # type: (RequestHandler, *Any, **Any) -> Any
+                with _handle_request_impl(self):
+                    return await old_execute(self, *args, **kwargs)
+
+        else:
+
+            @coroutine  # type: ignore
+            def sentry_execute_request_handler(self, *args, **kwargs):  # type: ignore
+                # type: (RequestHandler, *Any, **Any) -> Any
+                with _handle_request_impl(self):
+                    result = yield from old_execute(self, *args, **kwargs)
+                    return result
+
+        RequestHandler._execute = sentry_execute_request_handler
+
+        old_log_exception = RequestHandler.log_exception
+
+        def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
+            # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any]
+            _capture_exception(ty, value, tb)
+            return old_log_exception(self, ty, value, tb, *args, **kwargs)
+
+        RequestHandler.log_exception = sentry_log_exception
+
+
+@contextlib.contextmanager
+def _handle_request_impl(self):
+    # type: (RequestHandler) -> Generator[None, None, None]
+    integration = sentry_sdk.get_client().get_integration(TornadoIntegration)
+
+    if integration is None:
+        # No integration: run the handler unchanged and stop the generator here so the
+        # context manager does not fall through to the instrumentation below.
+        yield
+        return
+
+    weak_handler = weakref.ref(self)
+
+    with sentry_sdk.isolation_scope() as scope:
+        headers = self.request.headers
+
+        scope.clear_breadcrumbs()
+        processor = _make_event_processor(weak_handler)
+        scope.add_event_processor(processor)
+
+        transaction = continue_trace(
+            headers,
+            op=OP.HTTP_SERVER,
+            # As with other integrations, this is only a fallback
+            # transaction name. The event processor registered below
+            # derives the real transaction name from the handler
+            # method once the request has been dispatched.
+            name="generic Tornado request",
+            source=TransactionSource.ROUTE,
+            origin=TornadoIntegration.origin,
+        )
+
+        with sentry_sdk.start_transaction(
+            transaction, custom_sampling_context={"tornado_request": self.request}
+        ):
+            yield
+
+
+@ensure_integration_enabled(TornadoIntegration)
+def _capture_exception(ty, value, tb):
+    # type: (type, BaseException, Any) -> None
+    if isinstance(value, HTTPError):
+        return
+
+    event, hint = event_from_exception(
+        (ty, value, tb),
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "tornado", "handled": False},
+    )
+
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _make_event_processor(weak_handler):
+    # type: (Callable[[], RequestHandler]) -> EventProcessor
+    def tornado_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        handler = weak_handler()
+        if handler is None:
+            return event
+
+        request = handler.request
+
+        with capture_internal_exceptions():
+            method = getattr(handler, handler.request.method.lower())
+            event["transaction"] = transaction_from_function(method) or ""
+            event["transaction_info"] = {"source": TransactionSource.COMPONENT}
+
+        with capture_internal_exceptions():
+            extractor = TornadoRequestExtractor(request)
+            extractor.extract_into_event(event)
+
+            request_info = event["request"]
+
+            request_info["url"] = "%s://%s%s" % (
+                request.protocol,
+                request.host,
+                request.path,
+            )
+
+            request_info["query_string"] = request.query
+            request_info["method"] = request.method
+            request_info["env"] = {"REMOTE_ADDR": request.remote_ip}
+            request_info["headers"] = _filter_headers(dict(request.headers))
+
+        with capture_internal_exceptions():
+            if handler.current_user and should_send_default_pii():
+                event.setdefault("user", {}).setdefault("is_authenticated", True)
+
+        return event
+
+    return tornado_processor
+
+
+class TornadoRequestExtractor(RequestExtractor):
+    def content_length(self):
+        # type: () -> int
+        if self.request.body is None:
+            return 0
+        return len(self.request.body)
+
+    def cookies(self):
+        # type: () -> Dict[str, str]
+        return {k: v.value for k, v in self.request.cookies.items()}
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body
+
+    def form(self):
+        # type: () -> Dict[str, Any]
+        return {
+            k: [v.decode("latin1", "replace") for v in vs]
+            for k, vs in self.request.body_arguments.items()
+        }
+
+    def is_json(self):
+        # type: () -> bool
+        return _is_json_content_type(self.request.headers.get("content-type"))
+
+    def files(self):
+        # type: () -> Dict[str, Any]
+        return {k: v[0] for k, v in self.request.files.items() if v}
+
+    def size_of_file(self, file):
+        # type: (Any) -> int
+        return len(file.body or ())
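An illustrative enablement sketch (not part of the vendored module); it assumes Tornado is
installed and shows a minimal handler whose requests are traced by the patched _execute:

import tornado.ioloop
import tornado.web

import sentry_sdk
from sentry_sdk.integrations.tornado import TornadoIntegration

sentry_sdk.init(
    # dsn="...",  # a real project DSN would normally go here
    integrations=[TornadoIntegration()],
)

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello, world")

if __name__ == "__main__":
    app = tornado.web.Application([(r"/", MainHandler)])
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()
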
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/trytond.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/trytond.py
new file mode 100644
index 00000000..2c44c593
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/trytond.py
@@ -0,0 +1,50 @@
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.utils import ensure_integration_enabled, event_from_exception
+
+from trytond.exceptions import TrytonException  # type: ignore
+from trytond.wsgi import app  # type: ignore
+
+
+# TODO: trytond-worker, trytond-cron and trytond-admin integrations
+
+
+class TrytondWSGIIntegration(Integration):
+    identifier = "trytond_wsgi"
+    origin = f"auto.http.{identifier}"
+
+    def __init__(self):  # type: () -> None
+        pass
+
+    @staticmethod
+    def setup_once():  # type: () -> None
+        app.wsgi_app = SentryWsgiMiddleware(
+            app.wsgi_app,
+            span_origin=TrytondWSGIIntegration.origin,
+        )
+
+        @ensure_integration_enabled(TrytondWSGIIntegration)
+        def error_handler(e):  # type: (Exception) -> None
+            if isinstance(e, TrytonException):
+                return
+            else:
+                client = sentry_sdk.get_client()
+                event, hint = event_from_exception(
+                    e,
+                    client_options=client.options,
+                    mechanism={"type": "trytond", "handled": False},
+                )
+                sentry_sdk.capture_event(event, hint=hint)
+
+        # The expected error-handler signature changed when the
+        # error_handler decorator was introduced in Tryton 5.4.
+        if hasattr(app, "error_handler"):
+
+            @app.error_handler
+            def _(app, request, e):  # type: ignore
+                error_handler(e)
+
+        else:
+            app.error_handlers.append(error_handler)
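An illustrative enablement sketch (not part of the vendored module); setup_once() wraps
trytond.wsgi.app in SentryWsgiMiddleware and registers the error handler shown above:

import sentry_sdk
from sentry_sdk.integrations.trytond import TrytondWSGIIntegration

sentry_sdk.init(
    # dsn="...",  # a real project DSN would normally go here
    integrations=[TrytondWSGIIntegration()],
)
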
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/typer.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/typer.py
new file mode 100644
index 00000000..8879d6d0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/typer.py
@@ -0,0 +1,60 @@
+import sentry_sdk
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Callable
+    from typing import Any
+    from typing import Type
+    from typing import Optional
+
+    from types import TracebackType
+
+    Excepthook = Callable[
+        [Type[BaseException], BaseException, Optional[TracebackType]],
+        Any,
+    ]
+
+try:
+    import typer
+except ImportError:
+    raise DidNotEnable("Typer not installed")
+
+
+class TyperIntegration(Integration):
+    identifier = "typer"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        typer.main.except_hook = _make_excepthook(typer.main.except_hook)  # type: ignore
+
+
+def _make_excepthook(old_excepthook):
+    # type: (Excepthook) -> Excepthook
+    def sentry_sdk_excepthook(type_, value, traceback):
+        # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None
+        integration = sentry_sdk.get_client().get_integration(TyperIntegration)
+
+        # Note: If we replace this with ensure_integration_enabled then
+        # we break the exceptiongroup backport;
+        # See: https://github.com/getsentry/sentry-python/issues/3097
+        if integration is None:
+            return old_excepthook(type_, value, traceback)
+
+        with capture_internal_exceptions():
+            event, hint = event_from_exception(
+                (type_, value, traceback),
+                client_options=sentry_sdk.get_client().options,
+                mechanism={"type": "typer", "handled": False},
+            )
+            sentry_sdk.capture_event(event, hint=hint)
+
+        return old_excepthook(type_, value, traceback)
+
+    return sentry_sdk_excepthook
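An illustrative enablement sketch (not part of the vendored module); any unhandled error in
a Typer command is routed through the patched typer.main.except_hook:

import typer

import sentry_sdk
from sentry_sdk.integrations.typer import TyperIntegration

sentry_sdk.init(
    # dsn="...",  # a real project DSN would normally go here
    integrations=[TyperIntegration()],
)

app = typer.Typer()

@app.command()
def hello(name: str):
    typer.echo(f"Hello {name}")
    raise RuntimeError("boom")  # captured via the patched hook before the original hook runs

if __name__ == "__main__":
    app()
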
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/unleash.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/unleash.py
new file mode 100644
index 00000000..873f36c6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/unleash.py
@@ -0,0 +1,34 @@
+from functools import wraps
+from typing import Any
+
+import sentry_sdk
+from sentry_sdk.integrations import Integration, DidNotEnable
+
+try:
+    from UnleashClient import UnleashClient
+except ImportError:
+    raise DidNotEnable("UnleashClient is not installed")
+
+
+class UnleashIntegration(Integration):
+    identifier = "unleash"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        # Wrap and patch evaluation methods (class methods)
+        old_is_enabled = UnleashClient.is_enabled
+
+        @wraps(old_is_enabled)
+        def sentry_is_enabled(self, feature, *args, **kwargs):
+            # type: (UnleashClient, str, *Any, **Any) -> Any
+            enabled = old_is_enabled(self, feature, *args, **kwargs)
+
+            # We have no way of knowing what type of unleash feature this is, so we have to treat
+            # it as a boolean / toggle feature.
+            flags = sentry_sdk.get_current_scope().flags
+            flags.set(feature, enabled)
+
+            return enabled
+
+        UnleashClient.is_enabled = sentry_is_enabled  # type: ignore
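An illustrative usage sketch (not part of the vendored module); it assumes the usual
UnleashClient constructor arguments, and every is_enabled() call is mirrored onto the
current scope's flags by the patch above:

import sentry_sdk
from sentry_sdk.integrations.unleash import UnleashIntegration
from UnleashClient import UnleashClient

sentry_sdk.init(
    # dsn="...",  # a real project DSN would normally go here
    integrations=[UnleashIntegration()],
)

client = UnleashClient(url="https://unleash.example.com/api", app_name="my-app")
client.initialize_client()

# The boolean result is recorded as a flag on the current Sentry scope.
enabled = client.is_enabled("my-feature-flag")
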
diff --git a/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/wsgi.py b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/wsgi.py
new file mode 100644
index 00000000..e628e50e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/sentry_sdk/integrations/wsgi.py
@@ -0,0 +1,310 @@
+import sys
+from functools import partial
+
+import sentry_sdk
+from sentry_sdk._werkzeug import get_host, _get_headers
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.integrations._wsgi_common import (
+    DEFAULT_HTTP_METHODS_TO_CAPTURE,
+    _filter_headers,
+    nullcontext,
+)
+from sentry_sdk.sessions import track_session
+from sentry_sdk.scope import use_isolation_scope
+from sentry_sdk.tracing import Transaction, TransactionSource
+from sentry_sdk.utils import (
+    ContextVar,
+    capture_internal_exceptions,
+    event_from_exception,
+    reraise,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Callable
+    from typing import Dict
+    from typing import Iterator
+    from typing import Any
+    from typing import Tuple
+    from typing import Optional
+    from typing import TypeVar
+    from typing import Protocol
+
+    from sentry_sdk.utils import ExcInfo
+    from sentry_sdk._types import Event, EventProcessor
+
+    WsgiResponseIter = TypeVar("WsgiResponseIter")
+    WsgiResponseHeaders = TypeVar("WsgiResponseHeaders")
+    WsgiExcInfo = TypeVar("WsgiExcInfo")
+
+    class StartResponse(Protocol):
+        def __call__(self, status, response_headers, exc_info=None):  # type: ignore
+            # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter
+            pass
+
+
+_wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied")
+
+
+def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+    # type: (str, str, str) -> str
+    return s.encode("latin1").decode(charset, errors)
+
+
+def get_request_url(environ, use_x_forwarded_for=False):
+    # type: (Dict[str, str], bool) -> str
+    """Return the absolute URL without query string for the given WSGI
+    environment."""
+    script_name = environ.get("SCRIPT_NAME", "").rstrip("/")
+    path_info = environ.get("PATH_INFO", "").lstrip("/")
+    path = f"{script_name}/{path_info}"
+
+    return "%s://%s/%s" % (
+        environ.get("wsgi.url_scheme"),
+        get_host(environ, use_x_forwarded_for),
+        wsgi_decoding_dance(path).lstrip("/"),
+    )
+
+
+class SentryWsgiMiddleware:
+    __slots__ = (
+        "app",
+        "use_x_forwarded_for",
+        "span_origin",
+        "http_methods_to_capture",
+    )
+
+    def __init__(
+        self,
+        app,  # type: Callable[[Dict[str, str], Callable[..., Any]], Any]
+        use_x_forwarded_for=False,  # type: bool
+        span_origin="manual",  # type: str
+        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: Tuple[str, ...]
+    ):
+        # type: (...) -> None
+        self.app = app
+        self.use_x_forwarded_for = use_x_forwarded_for
+        self.span_origin = span_origin
+        self.http_methods_to_capture = http_methods_to_capture
+
+    def __call__(self, environ, start_response):
+        # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+        if _wsgi_middleware_applied.get(False):
+            return self.app(environ, start_response)
+
+        _wsgi_middleware_applied.set(True)
+        try:
+            with sentry_sdk.isolation_scope() as scope:
+                with track_session(scope, session_mode="request"):
+                    with capture_internal_exceptions():
+                        scope.clear_breadcrumbs()
+                        scope._name = "wsgi"
+                        scope.add_event_processor(
+                            _make_wsgi_event_processor(
+                                environ, self.use_x_forwarded_for
+                            )
+                        )
+
+                    method = environ.get("REQUEST_METHOD", "").upper()
+                    transaction = None
+                    if method in self.http_methods_to_capture:
+                        transaction = continue_trace(
+                            environ,
+                            op=OP.HTTP_SERVER,
+                            name="generic WSGI request",
+                            source=TransactionSource.ROUTE,
+                            origin=self.span_origin,
+                        )
+
+                    with (
+                        sentry_sdk.start_transaction(
+                            transaction,
+                            custom_sampling_context={"wsgi_environ": environ},
+                        )
+                        if transaction is not None
+                        else nullcontext()
+                    ):
+                        try:
+                            response = self.app(
+                                environ,
+                                partial(
+                                    _sentry_start_response, start_response, transaction
+                                ),
+                            )
+                        except BaseException:
+                            reraise(*_capture_exception())
+        finally:
+            _wsgi_middleware_applied.set(False)
+
+        return _ScopedResponse(scope, response)
+
+
+def _sentry_start_response(  # type: ignore
+    old_start_response,  # type: StartResponse
+    transaction,  # type: Optional[Transaction]
+    status,  # type: str
+    response_headers,  # type: WsgiResponseHeaders
+    exc_info=None,  # type: Optional[WsgiExcInfo]
+):
+    # type: (...) -> WsgiResponseIter
+    with capture_internal_exceptions():
+        status_int = int(status.split(" ", 1)[0])
+        if transaction is not None:
+            transaction.set_http_status(status_int)
+
+    if exc_info is None:
+        # The Django Rest Framework WSGI test client, and likely other
+        # (incorrect) implementations, cannot deal with the exc_info argument
+        # if one is present. Avoid providing a third argument if not necessary.
+        return old_start_response(status, response_headers)
+    else:
+        return old_start_response(status, response_headers, exc_info)
+
+
+def _get_environ(environ):
+    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+    """
+    Returns our explicitly included environment variables we want to
+    capture (server name, port and remote addr if pii is enabled).
+    """
+    keys = ["SERVER_NAME", "SERVER_PORT"]
+    if should_send_default_pii():
+        # Including the remote address makes debugging proxy setups easier;
+        # the proxy headers themselves are captured with the request headers.
+        keys += ["REMOTE_ADDR"]
+
+    for key in keys:
+        if key in environ:
+            yield key, environ[key]
+
+
+def get_client_ip(environ):
+    # type: (Dict[str, str]) -> Optional[Any]
+    """
+    Infer the user IP address from various headers. This cannot be used in
+    security-sensitive situations since the value may be forged by a client,
+    but it's good enough for the event payload.
+    """
+    try:
+        return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip()
+    except (KeyError, IndexError):
+        pass
+
+    try:
+        return environ["HTTP_X_REAL_IP"]
+    except KeyError:
+        pass
+
+    return environ.get("REMOTE_ADDR")
+
+
+def _capture_exception():
+    # type: () -> ExcInfo
+    """
+    Captures the current exception and sends it to Sentry.
+    Returns the ExcInfo tuple so it can be reraised afterwards.
+    """
+    exc_info = sys.exc_info()
+    e = exc_info[1]
+
+    # SystemExit(0) is the only uncaught exception that is expected behavior
+    should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None)
+    if not should_skip_capture:
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=sentry_sdk.get_client().options,
+            mechanism={"type": "wsgi", "handled": False},
+        )
+        sentry_sdk.capture_event(event, hint=hint)
+
+    return exc_info
+
+
+class _ScopedResponse:
+    """
+    Uses a separate scope for each response chunk.
+
+    This will make WSGI apps more tolerant against:
+    - WSGI servers streaming responses from a different thread (or from
+      multiple threads) than the one that called start_response
+    - close() not being called
+    - WSGI servers streaming responses interleaved from the same thread
+    """
+
+    __slots__ = ("_response", "_scope")
+
+    def __init__(self, scope, response):
+        # type: (sentry_sdk.scope.Scope, Iterator[bytes]) -> None
+        self._scope = scope
+        self._response = response
+
+    def __iter__(self):
+        # type: () -> Iterator[bytes]
+        iterator = iter(self._response)
+
+        while True:
+            with use_isolation_scope(self._scope):
+                try:
+                    chunk = next(iterator)
+                except StopIteration:
+                    break
+                except BaseException:
+                    reraise(*_capture_exception())
+
+            yield chunk
+
+    def close(self):
+        # type: () -> None
+        with use_isolation_scope(self._scope):
+            try:
+                self._response.close()  # type: ignore
+            except AttributeError:
+                pass
+            except BaseException:
+                reraise(*_capture_exception())
+
+
+def _make_wsgi_event_processor(environ, use_x_forwarded_for):
+    # type: (Dict[str, str], bool) -> EventProcessor
+    # It's a bit unfortunate that we have to extract and parse the request data
+    # from the environ so eagerly, but there are a few good reasons for this.
+    #
+    # We might be in a situation where the scope never gets torn down
+    # properly. In that case we will have an unnecessary strong reference to
+    # all objects in the environ (some of which may take a lot of memory) when
+    # we're really just interested in a few of them.
+    #
+    # Keeping the environment around for longer than the request lifecycle is
+    # also not necessarily something uWSGI can deal with:
+    # https://github.com/unbit/uwsgi/issues/1950
+
+    client_ip = get_client_ip(environ)
+    request_url = get_request_url(environ, use_x_forwarded_for)
+    query_string = environ.get("QUERY_STRING")
+    method = environ.get("REQUEST_METHOD")
+    env = dict(_get_environ(environ))
+    headers = _filter_headers(dict(_get_headers(environ)))
+
+    def event_processor(event, hint):
+        # type: (Event, Dict[str, Any]) -> Event
+        with capture_internal_exceptions():
+            # if the code below fails halfway through we at least have some data
+            request_info = event.setdefault("request", {})
+
+            if should_send_default_pii():
+                user_info = event.setdefault("user", {})
+                if client_ip:
+                    user_info.setdefault("ip_address", client_ip)
+
+            request_info["url"] = request_url
+            request_info["query_string"] = query_string
+            request_info["method"] = method
+            request_info["env"] = env
+            request_info["headers"] = headers
+
+        return event
+
+    return event_processor
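An illustrative usage sketch (not part of the vendored module), wrapping a trivial WSGI app
with the middleware defined above:

import sentry_sdk
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware

sentry_sdk.init()  # a real project DSN would normally be passed here

def application(environ, start_response):
    # A minimal WSGI app used only for illustration.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello"]

# Transactions are started only for the configured HTTP methods.
application = SentryWsgiMiddleware(application, http_methods_to_capture=("GET", "POST"))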