author     S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
two version of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/__init__.py | 11
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_constants.py | 66
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_cpu.py | 63
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_exporter.py | 351
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_filter.py | 194
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/__init__.py | 23
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_client.py | 93
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_configuration.py | 59
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/__init__.py | 19
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_operations.py | 544
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_serialization.py | 1998
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_vendor.py | 26
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/__init__.py | 23
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_client.py | 95
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_configuration.py | 59
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/__init__.py | 19
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_operations.py | 464
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_vendor.py | 26
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/__init__.py | 65
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_enums.py | 111
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_models.py | 1123
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/py.typed | 1
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_live_metrics.py | 306
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_policy.py | 36
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_processor.py | 33
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_projection.py | 98
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_state.py | 190
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_types.py | 235
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_utils.py | 179
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_validate.py | 139
35 files changed, 6749 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/__init__.py
new file mode 100644
index 00000000..39d410a7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/__init__.py
@@ -0,0 +1,11 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License in the project root for
+# license information.
+# -------------------------------------------------------------------------
+
+from azure.monitor.opentelemetry.exporter._quickpulse._live_metrics import enable_live_metrics
+
+__all__ = [
+    "enable_live_metrics",
+]
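
The package's public surface is just enable_live_metrics. As a rough usage sketch (assuming it forwards the same connection_string/credential keywords that _QuickpulseExporter documents further down; the connection string value here is a placeholder):

    # Hedged sketch: enable_live_metrics is assumed to accept the same
    # connection_string/credential keywords documented on _QuickpulseExporter below.
    from azure.monitor.opentelemetry.exporter._quickpulse import enable_live_metrics

    enable_live_metrics(
        connection_string="InstrumentationKey=<placeholder>",  # placeholder value
    )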
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_constants.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_constants.py
new file mode 100644
index 00000000..29c03e8b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_constants.py
@@ -0,0 +1,66 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import sys
+
+# cSpell:disable
+
+# (OpenTelemetry metric name, Quickpulse metric name)
+# Memory
+_COMMITTED_BYTES_NAME = ("azuremonitor.memorycommittedbytes", "\\Memory\\Committed Bytes")
+_PROCESS_PHYSICAL_BYTES_NAME = ("azuremonitor.processphysicalbytes", "\\Process\\Physical Bytes")
+# CPU
+_PROCESSOR_TIME_NAME = ("azuremonitor.processortotalprocessortime", "\\Processor(_Total)\\% Processor Time")
+_PROCESS_TIME_NORMALIZED_NAME = ("azuremonitor.processtimenormalized", "\\% Process\\Processor Time Normalized")
+# Request
+_REQUEST_RATE_NAME = ("azuremonitor.requestssec", "\\ApplicationInsights\\Requests/Sec")
+_REQUEST_FAILURE_RATE_NAME = ("azuremonitor.requestsfailedsec", "\\ApplicationInsights\\Requests Failed/Sec")
+_REQUEST_DURATION_NAME = ("azuremonitor.requestduration", "\\ApplicationInsights\\Request Duration")
+# Dependency
+_DEPENDENCY_RATE_NAME = ("azuremonitor.dependencycallssec", "\\ApplicationInsights\\Dependency Calls/Sec")
+_DEPENDENCY_FAILURE_RATE_NAME = (
+    "azuremonitor.dependencycallsfailedsec",
+    "\\ApplicationInsights\\Dependency Calls Failed/Sec",
+)
+_DEPENDENCY_DURATION_NAME = ("azuremonitor.dependencycallduration", "\\ApplicationInsights\\Dependency Call Duration")
+# Exception
+_EXCEPTION_RATE_NAME = ("azuremonitor.exceptionssec", "\\ApplicationInsights\\Exceptions/Sec")
+
+_QUICKPULSE_METRIC_NAME_MAPPINGS = dict(
+    [
+        _COMMITTED_BYTES_NAME,
+        _PROCESS_PHYSICAL_BYTES_NAME,
+        _PROCESSOR_TIME_NAME,
+        _PROCESS_TIME_NORMALIZED_NAME,
+        _REQUEST_RATE_NAME,
+        _REQUEST_FAILURE_RATE_NAME,
+        _REQUEST_DURATION_NAME,
+        _DEPENDENCY_RATE_NAME,
+        _DEPENDENCY_FAILURE_RATE_NAME,
+        _DEPENDENCY_DURATION_NAME,
+        _EXCEPTION_RATE_NAME,
+    ]
+)
+
+# Quickpulse intervals
+_SHORT_PING_INTERVAL_SECONDS = 5
+_POST_INTERVAL_SECONDS = 1
+_LONG_PING_INTERVAL_SECONDS = 60
+_POST_CANCEL_INTERVAL_SECONDS = 20
+
+# Response Headers
+
+_QUICKPULSE_ETAG_HEADER_NAME = "x-ms-qps-configuration-etag"
+_QUICKPULSE_POLLING_HEADER_NAME = "x-ms-qps-service-polling-interval-hint"
+_QUICKPULSE_REDIRECT_HEADER_NAME = "x-ms-qps-service-endpoint-redirect-v2"
+_QUICKPULSE_SUBSCRIBED_HEADER_NAME = "x-ms-qps-subscribed"
+
+# Projections (filtering)
+
+_QUICKPULSE_PROJECTION_COUNT = "Count()"
+_QUICKPULSE_PROJECTION_DURATION = "Duration"
+_QUICKPULSE_PROJECTION_CUSTOM = "CustomDimensions."
+
+_QUICKPULSE_PROJECTION_MAX_VALUE = sys.maxsize
+_QUICKPULSE_PROJECTION_MIN_VALUE = -sys.maxsize - 1
+
+# cSpell:enable
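
Since _QUICKPULSE_METRIC_NAME_MAPPINGS is built from the (OpenTelemetry name, Quickpulse name) pairs above, translating an instrument name is a plain dict lookup. A minimal illustration, assuming the vendored module path is importable as-is:

    # Minimal illustration of the name mapping defined above.
    from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
        _QUICKPULSE_METRIC_NAME_MAPPINGS,
    )

    print(_QUICKPULSE_METRIC_NAME_MAPPINGS["azuremonitor.requestssec"])
    # -> \ApplicationInsights\Requests/Sec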
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_cpu.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_cpu.py
new file mode 100644
index 00000000..08dce9d0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_cpu.py
@@ -0,0 +1,63 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# cSpell:disable
+
+from datetime import datetime
+from typing import Iterable
+
+import psutil
+
+from opentelemetry.metrics import CallbackOptions, Observation
+
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+    _get_quickpulse_last_process_cpu,
+    _get_quickpulse_last_process_time,
+    _get_quickpulse_process_elapsed_time,
+    _set_quickpulse_last_process_cpu,
+    _set_quickpulse_last_process_time,
+    _set_quickpulse_process_elapsed_time,
+)
+
+PROCESS = psutil.Process()
+NUM_CPUS = psutil.cpu_count()
+
+
+#  pylint: disable=unused-argument
+def _get_process_memory(options: CallbackOptions) -> Iterable[Observation]:
+    memory = 0
+    try:
+        # rss is non-swapped physical memory a process has used
+        memory = PROCESS.memory_info().rss
+    except (psutil.NoSuchProcess, psutil.AccessDenied):
+        pass
+    yield Observation(memory, {})
+
+
+# pylint: disable=unused-argument
+def _get_process_time_normalized_old(options: CallbackOptions) -> Iterable[Observation]:
+    normalized_cpu_percentage = 0.0
+    try:
+        cpu_times = PROCESS.cpu_times()
+        # total process time is user + system in s
+        total_time_s = cpu_times.user + cpu_times.system
+        process_time_s = total_time_s - _get_quickpulse_last_process_time()
+        _set_quickpulse_last_process_time(process_time_s)
+        # Find elapsed time in s since last collection
+        current_time = datetime.now()
+        elapsed_time_s = (current_time - _get_quickpulse_process_elapsed_time()).total_seconds()
+        _set_quickpulse_process_elapsed_time(current_time)
+        # Obtain cpu % by dividing by elapsed time
+        cpu_percentage = process_time_s / elapsed_time_s
+        # Normalize by dividing by the number of logical CPUs
+        normalized_cpu_percentage = cpu_percentage / NUM_CPUS
+        _set_quickpulse_last_process_cpu(normalized_cpu_percentage)
+    except (psutil.NoSuchProcess, psutil.AccessDenied, ZeroDivisionError):
+        pass
+    yield Observation(normalized_cpu_percentage, {})
+
+
+# pylint: disable=unused-argument
+def _get_process_time_normalized(options: CallbackOptions) -> Iterable[Observation]:
+    yield Observation(_get_quickpulse_last_process_cpu(), {})
+
+# cSpell:enable
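
These callbacks follow the OpenTelemetry asynchronous-instrument contract (CallbackOptions -> Iterable[Observation]). The actual registration happens in _live_metrics.py, which this diff lists but does not show; the following is only a sketch of how such callbacks are typically attached to observable gauges:

    # Sketch only: the real registration lives in _live_metrics.py (not shown here).
    from opentelemetry.metrics import get_meter

    from azure.monitor.opentelemetry.exporter._quickpulse._cpu import (
        _get_process_memory,
        _get_process_time_normalized,
    )

    meter = get_meter("quickpulse-sketch")  # hypothetical meter name
    meter.create_observable_gauge(
        "azuremonitor.memorycommittedbytes",
        callbacks=[_get_process_memory],
    )
    meter.create_observable_gauge(
        "azuremonitor.processtimenormalized",
        callbacks=[_get_process_time_normalized],
    )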
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_exporter.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_exporter.py
new file mode 100644
index 00000000..f61cebe5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_exporter.py
@@ -0,0 +1,351 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import logging
+from typing import Any, Optional
+import weakref
+
+from opentelemetry.context import (
+    _SUPPRESS_INSTRUMENTATION_KEY,
+    attach,
+    detach,
+    set_value,
+)
+from opentelemetry.sdk.metrics import (
+    Counter,
+    Histogram,
+)
+from opentelemetry.sdk.metrics._internal.point import MetricsData
+from opentelemetry.sdk.metrics.export import (
+    AggregationTemporality,
+    MetricExporter,
+    MetricExportResult,
+    MetricsData as OTMetricsData,
+    MetricReader,
+)
+
+from azure.core.pipeline.policies import ContentDecodePolicy
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+    _LONG_PING_INTERVAL_SECONDS,
+    _POST_CANCEL_INTERVAL_SECONDS,
+    _POST_INTERVAL_SECONDS,
+    _QUICKPULSE_ETAG_HEADER_NAME,
+    _QUICKPULSE_SUBSCRIBED_HEADER_NAME,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated._configuration import QuickpulseClientConfiguration
+from azure.monitor.opentelemetry.exporter._quickpulse._generated._client import QuickpulseClient
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import MonitoringDataPoint
+from azure.monitor.opentelemetry.exporter._quickpulse._filter import _update_filter_configuration
+from azure.monitor.opentelemetry.exporter._quickpulse._policy import _QuickpulseRedirectPolicy
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+    _get_and_clear_quickpulse_documents,
+    _get_global_quickpulse_state,
+    _get_quickpulse_etag,
+    _is_ping_state,
+    _set_global_quickpulse_state,
+    _set_quickpulse_etag,
+    _QuickpulseState,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._utils import (
+    _metric_to_quick_pulse_data_points,
+)
+from azure.monitor.opentelemetry.exporter._connection_string_parser import ConnectionStringParser
+from azure.monitor.opentelemetry.exporter._utils import (
+    _get_auth_policy,
+    _ticks_since_dot_net_epoch,
+    PeriodicTask,
+)
+
+
+_logger = logging.getLogger(__name__)
+
+
+_QUICKPULSE_METRIC_TEMPORALITIES = {
+    # Use DELTA temporalities because we want to reset the counts every collection interval
+    Counter: AggregationTemporality.DELTA,
+    Histogram: AggregationTemporality.DELTA,
+}
+
+
+class _Response:
+    """Response that encapsulates pipeline response and response headers from
+    QuickPulse client.
+    """
+
+    def __init__(self, pipeline_response, deserialized, response_headers):
+        self._pipeline_response = pipeline_response
+        self._deserialized = deserialized
+        self._response_headers = response_headers
+
+
+class _UnsuccessfulQuickPulsePostError(Exception):
+    """Exception raised to indicate unsuccessful QuickPulse post for backoff logic."""
+
+
+class _QuickpulseExporter(MetricExporter):
+
+    def __init__(self, **kwargs: Any) -> None:
+        """Metric exporter for Quickpulse.
+
+        :param str connection_string: The connection string used for your Application Insights resource.
+        :keyword TokenCredential credential: Token credential, such as ManagedIdentityCredential or
+            ClientSecretCredential, used for Azure Active Directory (AAD) authentication. Defaults to None.
+        :rtype: None
+        """
+        parsed_connection_string = ConnectionStringParser(kwargs.get("connection_string"))
+
+        self._live_endpoint = parsed_connection_string.live_endpoint
+        self._instrumentation_key = parsed_connection_string.instrumentation_key
+        self._credential = kwargs.get("credential")
+        config = QuickpulseClientConfiguration(credential=self._credential)  # type: ignore
+        qp_redirect_policy = _QuickpulseRedirectPolicy(permit_redirects=False)
+        policies = [
+            # Custom redirect policy for QP
+            qp_redirect_policy,
+            # Needed for serialization
+            ContentDecodePolicy(),
+            # Logging for client calls
+            config.http_logging_policy,
+            _get_auth_policy(self._credential, config.authentication_policy),
+            config.authentication_policy,
+            # Explicitly disabling to avoid tracing live metrics calls
+            # DistributedTracingPolicy(),
+        ]
+        self._client = QuickpulseClient(
+            credential=self._credential, endpoint=self._live_endpoint, policies=policies  # type: ignore
+        )
+        # Store a weakref to the client on the redirect policy so the endpoint
+        # can be dynamically modified if a redirect does occur
+        qp_redirect_policy._qp_client_ref = weakref.ref(self._client)
+
+        MetricExporter.__init__(
+            self,
+            preferred_temporality=_QUICKPULSE_METRIC_TEMPORALITIES,  # type: ignore
+        )
+
+    def export(
+        self,
+        metrics_data: OTMetricsData,
+        timeout_millis: float = 10_000,
+        **kwargs: Any,
+    ) -> MetricExportResult:
+        """Exports a batch of metric data
+
+        :param metrics_data: OpenTelemetry Metric(s) to export.
+        :type metrics_data: ~opentelemetry.sdk.metrics._internal.point.MetricsData
+        :param timeout_millis: The maximum amount of time to wait for each export. Not currently used.
+        :type timeout_millis: float
+        :return: The result of the export.
+        :rtype: ~opentelemetry.sdk.metrics.export.MetricExportResult
+        """
+        result = MetricExportResult.SUCCESS
+        base_monitoring_data_point = kwargs.get("base_monitoring_data_point")
+        if base_monitoring_data_point is None:
+            return MetricExportResult.FAILURE
+        data_points = _metric_to_quick_pulse_data_points(
+            metrics_data,
+            base_monitoring_data_point=base_monitoring_data_point,
+            documents=_get_and_clear_quickpulse_documents(),
+        )
+        configuration_etag = _get_quickpulse_etag() or ""
+        token = attach(set_value(_SUPPRESS_INSTRUMENTATION_KEY, True))
+        # pylint: disable=R1702
+        try:
+            post_response = self._client.publish(  # type: ignore
+                endpoint=self._live_endpoint,
+                monitoring_data_points=data_points,
+                ikey=self._instrumentation_key,  # type: ignore
+                configuration_etag=configuration_etag,
+                transmission_time=_ticks_since_dot_net_epoch(),
+                cls=_Response,
+            )
+            if not post_response:
+                # If no response, assume unsuccessful
+                result = MetricExportResult.FAILURE
+            else:
+                header = post_response._response_headers.get(  # pylint: disable=protected-access
+                    _QUICKPULSE_SUBSCRIBED_HEADER_NAME
+                )
+                if header != "true":
+                    # User leaving the live metrics page will be treated as an unsuccessful post
+                    result = MetricExportResult.FAILURE
+                else:
+                    # Check if etag has changed
+                    etag = post_response._response_headers.get(  # pylint: disable=protected-access
+                        _QUICKPULSE_ETAG_HEADER_NAME
+                    )
+                    if etag and etag != configuration_etag:
+                        config = (
+                            post_response._pipeline_response.http_response.content  # pylint: disable=protected-access
+                        )
+                        # Content will only be populated if configuration has changed (etag is different)
+                        if config:
+                            # Update and apply configuration changes
+                            try:
+                                _update_filter_configuration(etag, config)
+                            except Exception:  # pylint: disable=broad-except
+                                _logger.exception("Exception occurred while updating filter config.")
+                                result = MetricExportResult.FAILURE
+        except Exception:  # pylint: disable=broad-except
+            _logger.exception("Exception occurred while publishing live metrics.")
+            result = MetricExportResult.FAILURE
+        finally:
+            detach(token)
+        return result
+
+    def force_flush(
+        self,
+        timeout_millis: float = 10_000,
+    ) -> bool:
+        """
+        Ensure that export of any metrics currently received by the exporter
+        are completed as soon as possible. Called when SDK is flushed.
+
+        :param timeout_millis: The maximum amount of time to wait for the flush. Not currently used.
+        :type timeout_millis: float
+        :return: The result of the flush.
+        :rtype: bool
+        """
+        return True
+
+    def shutdown(
+        self,
+        timeout_millis: float = 30_000,
+        **kwargs: Any,
+    ) -> None:
+        """Shuts down the exporter.
+
+        Called when the SDK is shut down.
+
+        :param timeout_millis: The maximum amount of time to wait for shutdown. Not currently used.
+        :type timeout_millis: float
+        """
+
+    def _ping(self, monitoring_data_point: MonitoringDataPoint) -> Optional[_Response]:
+        ping_response = None
+        token = attach(set_value(_SUPPRESS_INSTRUMENTATION_KEY, True))
+        etag = _get_quickpulse_etag() or ""
+        try:
+            ping_response = self._client.is_subscribed(  # type: ignore
+                endpoint=self._live_endpoint,
+                monitoring_data_point=monitoring_data_point,
+                ikey=self._instrumentation_key,  # type: ignore
+                transmission_time=_ticks_since_dot_net_epoch(),
+                machine_name=monitoring_data_point.machine_name,
+                instance_name=monitoring_data_point.instance,
+                stream_id=monitoring_data_point.stream_id,
+                role_name=monitoring_data_point.role_name,
+                invariant_version=monitoring_data_point.invariant_version,  # type: ignore
+                configuration_etag=etag,
+                cls=_Response,
+            )
+            return ping_response  # type: ignore
+        except Exception:  # pylint: disable=broad-except
+            _logger.exception("Exception occurred while pinging live metrics.")
+        detach(token)
+        return ping_response
+
+
+class _QuickpulseMetricReader(MetricReader):
+
+    def __init__(
+        self,
+        exporter: _QuickpulseExporter,
+        base_monitoring_data_point: MonitoringDataPoint,
+    ) -> None:
+        self._exporter = exporter
+        self._base_monitoring_data_point = base_monitoring_data_point
+        self._elapsed_num_seconds = 0
+        self._worker = PeriodicTask(
+            interval=_POST_INTERVAL_SECONDS,
+            function=self._ticker,
+            name="QuickpulseMetricReader",
+        )
+        self._worker.daemon = True
+        super().__init__(
+            preferred_temporality=self._exporter._preferred_temporality,
+            preferred_aggregation=self._exporter._preferred_aggregation,
+        )
+        self._worker.start()
+
+    # pylint: disable=protected-access
+    # pylint: disable=too-many-nested-blocks
+    def _ticker(self) -> None:
+        if _is_ping_state():
+            # Send a ping if the elapsed number of seconds meets the threshold
+            if self._elapsed_num_seconds % _get_global_quickpulse_state().value == 0:
+                ping_response = self._exporter._ping(
+                    self._base_monitoring_data_point,
+                )
+                if ping_response:
+                    try:
+                        subscribed = ping_response._response_headers.get(_QUICKPULSE_SUBSCRIBED_HEADER_NAME)
+                        if subscribed and subscribed == "true":
+                            # Switch state to post if subscribed
+                            _set_global_quickpulse_state(_QuickpulseState.POST_SHORT)
+                            self._elapsed_num_seconds = 0
+                            # Update config etag
+                            etag = ping_response._response_headers.get(_QUICKPULSE_ETAG_HEADER_NAME)
+                            if etag is None:
+                                etag = ""
+                            if _get_quickpulse_etag() != etag:
+                                _set_quickpulse_etag(etag)
+                            # TODO: Set default document filter config from response body
+                            # config = ping_response._pipeline_response.http_response.content
+                        else:
+                            # Backoff after _LONG_PING_INTERVAL_SECONDS (60s) of no successful requests
+                            if (
+                                _get_global_quickpulse_state() is _QuickpulseState.PING_SHORT
+                                and self._elapsed_num_seconds >= _LONG_PING_INTERVAL_SECONDS
+                            ):
+                                _set_global_quickpulse_state(_QuickpulseState.PING_LONG)
+                            # Reset etag to default if not subscribed
+                            _set_quickpulse_etag("")
+                    except Exception:  # pylint: disable=broad-except
+                        _logger.exception("Exception occurred while reading live metrics ping response.")
+                        _set_quickpulse_etag("")
+                # TODO: Implement redirect
+                else:
+                    # Erroneous ping responses instigate backoff logic
+                    # Backoff after _LONG_PING_INTERVAL_SECONDS (60s) of no successful requests
+                    if (
+                        _get_global_quickpulse_state() is _QuickpulseState.PING_SHORT
+                        and self._elapsed_num_seconds >= _LONG_PING_INTERVAL_SECONDS
+                    ):
+                        _set_global_quickpulse_state(_QuickpulseState.PING_LONG)
+                        # Reset etag to default if error
+                        _set_quickpulse_etag("")
+        else:
+            try:
+                self.collect()
+            except _UnsuccessfulQuickPulsePostError:
+                # Unsuccessful posts instigate backoff logic
+                # Backoff after _POST_CANCEL_INTERVAL_SECONDS (20s) of no successful requests
+                # And resume pinging
+                if self._elapsed_num_seconds >= _POST_CANCEL_INTERVAL_SECONDS:
+                    _set_global_quickpulse_state(_QuickpulseState.PING_SHORT)
+                    # Reset etag to default
+                    _set_quickpulse_etag("")
+                    self._elapsed_num_seconds = 0
+
+        self._elapsed_num_seconds += 1
+
+    def _receive_metrics(
+        self,
+        metrics_data: MetricsData,
+        timeout_millis: float = 10_000,
+        **kwargs,
+    ) -> None:
+        result = self._exporter.export(
+            metrics_data,
+            timeout_millis=timeout_millis,
+            base_monitoring_data_point=self._base_monitoring_data_point,
+        )
+        if result is MetricExportResult.FAILURE:
+            # There is currently no way to propagate unsuccessful metric post so
+            # we raise an _UnsuccessfulQuickPulsePostError exception. MUST handle
+            # this exception whenever `collect()` is called
+            raise _UnsuccessfulQuickPulsePostError()
+
+    def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
+        self._worker.cancel()
+        self._worker.join()
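
Putting the two classes together: _QuickpulseMetricReader owns the ping/post state machine and drives the exporter once per _POST_INTERVAL_SECONDS. A hedged construction sketch, assuming MonitoringDataPoint accepts its attributes as keyword arguments (as these generated models normally do) and using placeholder values throughout:

    # Hedged sketch; all field values below are placeholders.
    from opentelemetry.sdk.metrics import MeterProvider

    from azure.monitor.opentelemetry.exporter._quickpulse._exporter import (
        _QuickpulseExporter,
        _QuickpulseMetricReader,
    )
    from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
        MonitoringDataPoint,
    )

    exporter = _QuickpulseExporter(connection_string="InstrumentationKey=<placeholder>")
    base_point = MonitoringDataPoint(  # assumed keyword-constructible
        machine_name="example-host",
        instance="example-instance",
        role_name="example-role",
        stream_id="<placeholder-stream-id>",
    )
    reader = _QuickpulseMetricReader(exporter, base_point)
    provider = MeterProvider(metric_readers=[reader])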
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_filter.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_filter.py
new file mode 100644
index 00000000..59e85eda
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_filter.py
@@ -0,0 +1,194 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import json
+
+from dataclasses import fields
+from typing import Any, Dict, List
+
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+    DerivedMetricInfo,
+    DocumentStreamInfo,
+    FilterConjunctionGroupInfo,
+    FilterInfo,
+    PredicateType,
+    TelemetryType,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._projection import (
+    _init_derived_metric_projection,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+    _clear_quickpulse_projection_map,
+    _set_quickpulse_derived_metric_infos,
+    _set_quickpulse_doc_stream_infos,
+    _set_quickpulse_etag,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+    _DATA_FIELD_NAMES,
+    _TelemetryData,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._utils import _filter_time_stamp_to_ms
+from azure.monitor.opentelemetry.exporter._quickpulse._validate import (
+    _validate_derived_metric_info,
+    _validate_document_filter_group_info,
+)
+
+
+# Apply filter configuration based off response
+# Called on post response from exporter
+def _update_filter_configuration(etag: str, config_bytes: bytes):
+    # Clear projection map
+    _clear_quickpulse_projection_map()
+    # config is a byte string that, when decoded, is JSON
+    config = json.loads(config_bytes.decode("utf-8"))
+    # Process metric filter configuration
+    _parse_metric_filter_configuration(config)
+    # Process document filter configuration
+    _parse_document_filter_configuration(config)
+    # Update new etag
+    _set_quickpulse_etag(etag)
+
+
+def _parse_metric_filter_configuration(config: Dict[str, Any]) -> None:
+    seen_ids = set()
+    # Process metric filter configuration
+    metric_infos: Dict[TelemetryType, List[DerivedMetricInfo]] = {}
+    for metric_info_dict in config.get("Metrics", []):
+        metric_info = DerivedMetricInfo.from_dict(metric_info_dict)
+        # Skip duplicate ids
+        if metric_info.id in seen_ids:
+            continue
+        if not _validate_derived_metric_info(metric_info):
+            continue
+        # Rename exception fields by parsing out "Exception." portion
+        for filter_group in metric_info.filter_groups:
+            _rename_exception_fields_for_filtering(filter_group)
+        telemetry_type: TelemetryType = TelemetryType(metric_info.telemetry_type)
+        metric_info_list = metric_infos.get(telemetry_type, [])
+        metric_info_list.append(metric_info)
+        metric_infos[telemetry_type] = metric_info_list
+        seen_ids.add(metric_info.id)
+        # Initialize projections from this derived metric info
+        _init_derived_metric_projection(metric_info)
+    _set_quickpulse_derived_metric_infos(metric_infos)
+
+
+def _parse_document_filter_configuration(config: Dict[str, Any]) -> None:
+    # Process document filter configuration
+    doc_infos: Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]] = {}
+    for doc_stream_dict in config.get("DocumentStreams", []):
+        doc_stream = DocumentStreamInfo.from_dict(doc_stream_dict)
+        for doc_filter_group in doc_stream.document_filter_groups:
+            if not _validate_document_filter_group_info(doc_filter_group):
+                continue
+            # Rename exception fields by parsing out "Exception." portion
+            _rename_exception_fields_for_filtering(doc_filter_group.filters)
+            telemetry_type: TelemetryType = TelemetryType(doc_filter_group.telemetry_type)
+            if telemetry_type not in doc_infos:
+                doc_infos[telemetry_type] = {}
+            if doc_stream.id not in doc_infos[telemetry_type]:
+                doc_infos[telemetry_type][doc_stream.id] = []
+            doc_infos[telemetry_type][doc_stream.id].append(doc_filter_group.filters)
+    _set_quickpulse_doc_stream_infos(doc_infos)
+
+
+def _rename_exception_fields_for_filtering(filter_groups: FilterConjunctionGroupInfo):
+    for filter in filter_groups.filters:
+        if filter.field_name.startswith("Exception."):
+            filter.field_name = filter.field_name.replace("Exception.", "")
+
+
+def _check_metric_filters(metric_infos: List[DerivedMetricInfo], data: _TelemetryData) -> bool:
+    match = False
+    for metric_info in metric_infos:
+        # Should only be a single `FilterConjunctionGroupInfo` in `filter_groups`
+        # but we use a logical OR to match if there is more than one
+        for group in metric_info.filter_groups:
+            match = match or _check_filters(group.filters, data)
+    return match
+
+
+# pylint: disable=R0911
+def _check_filters(filters: List[FilterInfo], data: _TelemetryData) -> bool:
+    if not filters:
+        return True
+    # All of the filters need to match for this to return true (AND operation).
+    for filter in filters:
+        name = filter.field_name
+        predicate = filter.predicate
+        comparand = filter.comparand
+        if name == "*":
+            return _check_any_field_filter(filter, data)
+        if name.startswith("CustomDimensions."):
+            return _check_custom_dim_field_filter(filter, data.custom_dimensions)
+        field_names = _DATA_FIELD_NAMES.get(type(data))
+        if field_names is None:
+            field_names = {}
+        field_name = field_names.get(name.lower(), "")
+        val = getattr(data, field_name, "")
+        if name == "Success":
+            if predicate == PredicateType.EQUAL:
+                return str(val).lower() == comparand.lower()
+            if predicate == PredicateType.NOT_EQUAL:
+                return str(val).lower() != comparand.lower()
+        elif name in ("ResultCode", "ResponseCode", "Duration"):
+            try:
+                val = int(val)
+            except Exception:  # pylint: disable=broad-exception-caught
+                return False
+            numerical_val = _filter_time_stamp_to_ms(comparand) if name == "Duration" else int(comparand)
+            if numerical_val is None:
+                return False
+            if predicate == PredicateType.EQUAL:
+                return val == numerical_val
+            if predicate == PredicateType.NOT_EQUAL:
+                return val != numerical_val
+            if predicate == PredicateType.GREATER_THAN:
+                return val > numerical_val
+            if predicate == PredicateType.GREATER_THAN_OR_EQUAL:
+                return val >= numerical_val
+            if predicate == PredicateType.LESS_THAN:
+                return val < numerical_val
+            if predicate == PredicateType.LESS_THAN_OR_EQUAL:
+                return val <= numerical_val
+            return False
+        else:
+            # string fields
+            return _field_string_compare(str(val), comparand, predicate)
+
+    return False
+
+
+def _check_any_field_filter(filter: FilterInfo, data: _TelemetryData) -> bool:
+    # At this point, the only predicates possible to pass in are Contains and DoesNotContain
+    # At config validation time the predicate is checked to be one of these two.
+    for field in fields(data):
+        if field.name == "custom_dimensions":
+            for val in data.custom_dimensions.values():
+                if _field_string_compare(str(val), filter.comparand, filter.predicate):
+                    return True
+        else:
+            val = getattr(data, field.name, None)  # type: ignore
+            if val is not None:
+                if _field_string_compare(str(val), filter.comparand, filter.predicate):
+                    return True
+    return False
+
+
+def _check_custom_dim_field_filter(filter: FilterInfo, custom_dimensions: Dict[str, str]) -> bool:
+    field = filter.field_name.replace("CustomDimensions.", "")
+    value = custom_dimensions.get(field)
+    if value is not None:
+        return _field_string_compare(str(value), filter.comparand, filter.predicate)
+    return False
+
+
+def _field_string_compare(value: str, comparand: str, predicate: str) -> bool:
+    if predicate == PredicateType.EQUAL:
+        return value == comparand
+    if predicate == PredicateType.NOT_EQUAL:
+        return value != comparand
+    if predicate == PredicateType.CONTAINS:
+        return comparand.lower() in value.lower()
+    if predicate == PredicateType.DOES_NOT_CONTAIN:
+        return comparand.lower() not in value.lower()
+    return False
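
The comparison helpers above reduce every predicate to a handful of primitives. A small illustration of _field_string_compare, which implements the four string predicates (exact EQUAL/NOT_EQUAL and case-insensitive CONTAINS/DOES_NOT_CONTAIN):

    # Illustration of the string predicates implemented above.
    from azure.monitor.opentelemetry.exporter._quickpulse._filter import (
        _field_string_compare,
    )
    from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
        PredicateType,
    )

    assert _field_string_compare("GET /home", "HOME", PredicateType.CONTAINS)   # case-insensitive
    assert not _field_string_compare("GET /home", "home", PredicateType.EQUAL)  # exact match only
    assert _field_string_compare("200", "404", PredicateType.NOT_EQUAL)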
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/__init__.py
new file mode 100644
index 00000000..664b539c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/__init__.py
@@ -0,0 +1,23 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._client import QuickpulseClient
+
+try:
+    from ._patch import __all__ as _patch_all
+    from ._patch import *  # pylint: disable=unused-wildcard-import
+except ImportError:
+    _patch_all = []
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+    "QuickpulseClient",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_client.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_client.py
new file mode 100644
index 00000000..d477f7d4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_client.py
@@ -0,0 +1,93 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, TYPE_CHECKING
+
+from azure.core import PipelineClient
+from azure.core.pipeline import policies
+from azure.core.rest import HttpRequest, HttpResponse
+
+from . import models as _models
+from ._configuration import QuickpulseClientConfiguration
+from ._operations import QuickpulseClientOperationsMixin
+from ._serialization import Deserializer, Serializer
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials import TokenCredential
+
+
+class QuickpulseClient(QuickpulseClientOperationsMixin):  # pylint: disable=client-accepts-api-version-keyword
+    """Quickpulse Client.
+
+    :param credential: Credential needed for the client to connect to Azure. Required.
+    :type credential: ~azure.core.credentials.TokenCredential
+    :keyword api_version: Api Version. Default value is "2024-04-01-preview". Note that overriding
+     this default value may result in unsupported behavior.
+    :paramtype api_version: str
+    """
+
+    def __init__(self, credential: "TokenCredential", **kwargs: Any) -> None:
+        _endpoint = "{endpoint}"
+        self._config = QuickpulseClientConfiguration(credential=credential, **kwargs)
+        _policies = kwargs.pop("policies", None)
+        if _policies is None:
+            _policies = [
+                policies.RequestIdPolicy(**kwargs),
+                self._config.headers_policy,
+                self._config.user_agent_policy,
+                self._config.proxy_policy,
+                policies.ContentDecodePolicy(**kwargs),
+                self._config.redirect_policy,
+                self._config.retry_policy,
+                self._config.authentication_policy,
+                self._config.custom_hook_policy,
+                self._config.logging_policy,
+                policies.DistributedTracingPolicy(**kwargs),
+                policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+                self._config.http_logging_policy,
+            ]
+        self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs)
+
+        client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+        self._serialize.client_side_validation = False
+
+    def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = client.send_request(request)
+        <HttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.HttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, stream=stream, **kwargs)  # type: ignore
+
+    def close(self) -> None:
+        self._client.close()
+
+    def __enter__(self) -> "QuickpulseClient":
+        self._client.__enter__()
+        return self
+
+    def __exit__(self, *exc_details: Any) -> None:
+        self._client.__exit__(*exc_details)
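
Beyond the send_request example in the docstring, the client supports the context-manager protocol via __enter__/__exit__. A usage sketch (the credential stands in for any TokenCredential implementation; DefaultAzureCredential is one common choice and assumes azure-identity is installed):

    # Sketch: any TokenCredential works; DefaultAzureCredential is an assumption.
    from azure.core.rest import HttpRequest
    from azure.identity import DefaultAzureCredential

    from azure.monitor.opentelemetry.exporter._quickpulse._generated import QuickpulseClient

    with QuickpulseClient(credential=DefaultAzureCredential()) as client:
        response = client.send_request(HttpRequest("GET", "https://www.example.org/"))
        print(response.status_code)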
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_configuration.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_configuration.py
new file mode 100644
index 00000000..bad8b6a9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_configuration.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.pipeline import policies
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials import TokenCredential
+
+VERSION = "unknown"
+
+
+class QuickpulseClientConfiguration:  # pylint: disable=too-many-instance-attributes,name-too-long
+    """Configuration for QuickpulseClient.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure. Required.
+    :type credential: ~azure.core.credentials.TokenCredential
+    :keyword api_version: Api Version. Default value is "2024-04-01-preview". Note that overriding
+     this default value may result in unsupported behavior.
+    :paramtype api_version: str
+    """
+
+    def __init__(self, credential: "TokenCredential", **kwargs: Any) -> None:
+        api_version: str = kwargs.pop("api_version", "2024-04-01-preview")
+
+        # if credential is None:
+        #     raise ValueError("Parameter 'credential' must not be None.")
+
+        self.credential = credential
+        self.api_version = api_version
+        self.credential_scopes = kwargs.pop("credential_scopes", ["https://monitor.azure.com/.default"])
+        kwargs.setdefault("sdk_moniker", "quickpulseclient/{}".format(VERSION))
+        self.polling_interval = kwargs.get("polling_interval", 30)
+        self._configure(**kwargs)
+
+    def _configure(self, **kwargs: Any) -> None:
+        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+        self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
+        self.authentication_policy = kwargs.get("authentication_policy")
+        # if self.credential and not self.authentication_policy:
+        #     self.authentication_policy = policies.BearerTokenCredentialPolicy(
+        #         self.credential, *self.credential_scopes, **kwargs
+        #     )
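
_configure resolves each pipeline policy from kwargs before falling back to a default, so any of them can be overridden at construction time. An illustrative override (note this vendored copy comments out the credential None-check, and _QuickpulseExporter itself passes a possibly-None credential):

    # Illustrative override; relies only on the kwargs read in _configure above.
    from azure.core.pipeline.policies import RetryPolicy

    from azure.monitor.opentelemetry.exporter._quickpulse._generated._configuration import (
        QuickpulseClientConfiguration,
    )

    config = QuickpulseClientConfiguration(
        credential=None,  # permitted here: the None check above is commented out
        retry_policy=RetryPolicy(retry_total=3),
    )
    print(config.api_version)  # "2024-04-01-preview"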
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/__init__.py
new file mode 100644
index 00000000..3d1697f9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import QuickpulseClientOperationsMixin
+
+from ._patch import __all__ as _patch_all
+from ._patch import *  # pylint: disable=unused-wildcard-import
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+    "QuickpulseClientOperationsMixin",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_operations.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_operations.py
new file mode 100644
index 00000000..270eb34f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_operations.py
@@ -0,0 +1,544 @@
+# pylint: disable=too-many-lines,too-many-statements
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload
+
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+
+from .. import models as _models
+from .._serialization import Serializer
+from .._vendor import QuickpulseClientMixinABC
+
+if sys.version_info >= (3, 9):
+    from collections.abc import MutableMapping
+else:
+    from typing import MutableMapping  # type: ignore  # pylint: disable=ungrouped-imports
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_quickpulse_is_subscribed_request(
+    *,
+    ikey: str,
+    transmission_time: Optional[int] = None,
+    machine_name: Optional[str] = None,
+    instance_name: Optional[str] = None,
+    stream_id: Optional[str] = None,
+    role_name: Optional[str] = None,
+    invariant_version: Optional[str] = None,
+    configuration_etag: Optional[str] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-04-01-preview"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/QuickPulseService.svc/ping"
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    _params["ikey"] = _SERIALIZER.query("ikey", ikey, "str")
+
+    # Construct headers
+    if transmission_time is not None:
+        _headers["x-ms-qps-transmission-time"] = _SERIALIZER.header("transmission_time", transmission_time, "int")
+    if machine_name is not None:
+        _headers["x-ms-qps-machine-name"] = _SERIALIZER.header("machine_name", machine_name, "str")
+    if instance_name is not None:
+        _headers["x-ms-qps-instance-name"] = _SERIALIZER.header("instance_name", instance_name, "str")
+    if stream_id is not None:
+        _headers["x-ms-qps-stream-id"] = _SERIALIZER.header("stream_id", stream_id, "str")
+    if role_name is not None:
+        _headers["x-ms-qps-role-name"] = _SERIALIZER.header("role_name", role_name, "str")
+    if invariant_version is not None:
+        _headers["x-ms-qps-invariant-version"] = _SERIALIZER.header("invariant_version", invariant_version, "str")
+    if configuration_etag is not None:
+        _headers["x-ms-qps-configuration-etag"] = _SERIALIZER.header("configuration_etag", configuration_etag, "str")
+    if content_type is not None:
+        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_quickpulse_publish_request(
+    *, ikey: str, configuration_etag: Optional[str] = None, transmission_time: Optional[int] = None, **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-04-01-preview"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = "/QuickPulseService.svc/post"
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+    _params["ikey"] = _SERIALIZER.query("ikey", ikey, "str")
+
+    # Construct headers
+    if configuration_etag is not None:
+        _headers["x-ms-qps-configuration-etag"] = _SERIALIZER.header("configuration_etag", configuration_etag, "str")
+    if transmission_time is not None:
+        _headers["x-ms-qps-transmission-time"] = _SERIALIZER.header("transmission_time", transmission_time, "int")
+    if content_type is not None:
+        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class QuickpulseClientOperationsMixin(QuickpulseClientMixinABC):
+    @overload
+    def is_subscribed(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_point: Optional[_models.MonitoringDataPoint] = None,
+        *,
+        ikey: str,
+        transmission_time: Optional[int] = None,
+        machine_name: Optional[str] = None,
+        instance_name: Optional[str] = None,
+        stream_id: Optional[str] = None,
+        role_name: Optional[str] = None,
+        invariant_version: Optional[str] = None,
+        configuration_etag: Optional[str] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Determine whether there is any subscription to the metrics and documents.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+         Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+         name and invariant version. Default value is None.
+        :type monitoring_data_point: ~quickpulse_client.models.MonitoringDataPoint
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long type of ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+         machine name with instance name as a backup. Default value is None.
+        :paramtype machine_name: str
+        :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+         Metrics uses machine name with instance name as a backup. Default value is None.
+        :paramtype instance_name: str
+        :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+         and documents. Default value is None.
+        :paramtype stream_id: str
+        :keyword role_name: Cloud role name of the service. Default value is None.
+        :paramtype role_name: str
+        :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+         between the client and Live Metrics. Default value is None.
+        :paramtype invariant_version: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    def is_subscribed(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_point: Optional[IO[bytes]] = None,
+        *,
+        ikey: str,
+        transmission_time: Optional[int] = None,
+        machine_name: Optional[str] = None,
+        instance_name: Optional[str] = None,
+        stream_id: Optional[str] = None,
+        role_name: Optional[str] = None,
+        invariant_version: Optional[str] = None,
+        configuration_etag: Optional[str] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Determine whether there is any subscription to the metrics and documents.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+         Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+         name and invariant version. Default value is None.
+        :type monitoring_data_point: IO[bytes]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long type of ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+         machine name with instance name as a backup. Default value is None.
+        :paramtype machine_name: str
+        :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+         Metrics uses machine name with instance name as a backup. Default value is None.
+        :paramtype instance_name: str
+        :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+         and documents. Default value is None.
+        :paramtype stream_id: str
+        :keyword role_name: Cloud role name of the service. Default value is None.
+        :paramtype role_name: str
+        :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+         between the client and Live Metrics. Default value is None.
+        :paramtype invariant_version: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    # @distributed_trace
+    def is_subscribed(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_point: Optional[Union[_models.MonitoringDataPoint, IO[bytes]]] = None,
+        *,
+        ikey: str,
+        transmission_time: Optional[int] = None,
+        machine_name: Optional[str] = None,
+        instance_name: Optional[str] = None,
+        stream_id: Optional[str] = None,
+        role_name: Optional[str] = None,
+        invariant_version: Optional[str] = None,
+        configuration_etag: Optional[str] = None,
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Determine whether there is any subscription to the metrics and documents.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+         Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+         name and invariant version. Is either a MonitoringDataPoint type or an IO[bytes] type. Default
+         value is None.
+        :type monitoring_data_point: ~quickpulse_client.models.MonitoringDataPoint or IO[bytes]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long type of ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+         machine name with instance name as a backup. Default value is None.
+        :paramtype machine_name: str
+        :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+         Metrics uses machine name with instance name as a backup. Default value is None.
+        :paramtype instance_name: str
+        :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+         and documents. Default value is None.
+        :paramtype stream_id: str
+        :keyword role_name: Cloud role name of the service. Default value is None.
+        :paramtype role_name: str
+        :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+         between the client and Live Metrics. Default value is None.
+        :paramtype invariant_version: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[_models.CollectionConfigurationInfo] = kwargs.pop("cls", None)
+
+        content_type = content_type or "application/json"
+        _json = None
+        _content = None
+        if isinstance(monitoring_data_point, (IOBase, bytes)):
+            _content = monitoring_data_point
+        else:
+            if monitoring_data_point is not None:
+                _json = self._serialize.body(monitoring_data_point, "MonitoringDataPoint")
+            else:
+                _json = None
+
+        _request = build_quickpulse_is_subscribed_request(
+            ikey=ikey,
+            transmission_time=transmission_time,
+            machine_name=machine_name,
+            instance_name=instance_name,
+            stream_id=stream_id,
+            role_name=role_name,
+            invariant_version=invariant_version,
+            configuration_etag=configuration_etag,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            json=_json,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("endpoint", endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                response.read()  # Load the body in memory and close the socket
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ServiceError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
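+        # The service steers the client through these response headers: a
+        # configuration etag, an optional endpoint redirect, a polling-interval
+        # hint, and the subscription flag itself.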
+        response_headers = {}
+        response_headers["x-ms-qps-configuration-etag"] = self._deserialize(
+            "str", response.headers.get("x-ms-qps-configuration-etag")
+        )
+        response_headers["x-ms-qps-service-endpoint-redirect-v2"] = self._deserialize(
+            "str", response.headers.get("x-ms-qps-service-endpoint-redirect-v2")
+        )
+        response_headers["x-ms-qps-service-polling-interval-hint"] = self._deserialize(
+            "str", response.headers.get("x-ms-qps-service-polling-interval-hint")
+        )
+        response_headers["x-ms-qps-subscribed"] = self._deserialize("str", response.headers.get("x-ms-qps-subscribed"))
+
+        deserialized = self._deserialize("CollectionConfigurationInfo", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @overload
+    def publish(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_points: Optional[List[_models.MonitoringDataPoint]] = None,
+        *,
+        ikey: str,
+        configuration_etag: Optional[str] = None,
+        transmission_time: Optional[int] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Publish live metrics to the Live Metrics service when there is an active subscription to the
+        metrics.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_points: Data contract between the client and Live Metrics.
+         /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+         invariant version. Default value is None.
+        :type monitoring_data_points: list[~quickpulse_client.models.MonitoringDataPoint]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long type of ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    def publish(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_points: Optional[IO[bytes]] = None,
+        *,
+        ikey: str,
+        configuration_etag: Optional[str] = None,
+        transmission_time: Optional[int] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Publish live metrics to the Live Metrics service when there is an active subscription to the
+        metrics.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_points: Data contract between the client and Live Metrics.
+         /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+         invariant version. Default value is None.
+        :type monitoring_data_points: IO[bytes]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long type of ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    # @distributed_trace
+    def publish(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_points: Optional[Union[List[_models.MonitoringDataPoint], IO[bytes]]] = None,
+        *,
+        ikey: str,
+        configuration_etag: Optional[str] = None,
+        transmission_time: Optional[int] = None,
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Publish live metrics to the Live Metrics service when there is an active subscription to the
+        metrics.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_points: Data contract between the client and Live Metrics.
+         /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+         invariant version. Is either a [MonitoringDataPoint] type or an IO[bytes] type. Default value is
+         None.
+        :type monitoring_data_points: list[~quickpulse_client.models.MonitoringDataPoint] or IO[bytes]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long type of ticks. Default value is None.
+        :paramtype transmission_time: int
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[_models.CollectionConfigurationInfo] = kwargs.pop("cls", None)
+
+        content_type = content_type or "application/json"
+        _json = None
+        _content = None
+        if isinstance(monitoring_data_points, (IOBase, bytes)):
+            _content = monitoring_data_points
+        else:
+            if monitoring_data_points is not None:
+                _json = self._serialize.body(monitoring_data_points, "[MonitoringDataPoint]")
+            else:
+                _json = None
+
+        _request = build_quickpulse_publish_request(
+            ikey=ikey,
+            configuration_etag=configuration_etag,
+            transmission_time=transmission_time,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            json=_json,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("endpoint", endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                response.read()  # Load the body in memory and close the socket
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ServiceError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["x-ms-qps-configuration-etag"] = self._deserialize(
+            "str", response.headers.get("x-ms-qps-configuration-etag")
+        )
+        response_headers["x-ms-qps-subscribed"] = self._deserialize("str", response.headers.get("x-ms-qps-subscribed"))
+
+        deserialized = self._deserialize("CollectionConfigurationInfo", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
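Taken together, the two mixin operations above form the Live Metrics handshake: ping the service with is_subscribed() until it reports a watcher, then stream samples with publish(). A minimal sketch of that loop, assuming the generated package re-exports QuickpulseClient and that MonitoringDataPoint accepts the keyword fields shown; credential wiring and field values below are illustrative, not taken from this diff:

    from azure.monitor.opentelemetry.exporter._quickpulse._generated import QuickpulseClient
    from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import MonitoringDataPoint

    client = QuickpulseClient(credential=None)  # a real caller would pass a TokenCredential
    point = MonitoringDataPoint(instance="my-host", stream_id="stream-1")  # illustrative fields
    ikey = "00000000-0000-0000-0000-000000000000"  # placeholder instrumentation key

    config = client.is_subscribed(monitoring_data_point=point, ikey=ikey)
    # Once the x-ms-qps-subscribed response header reads "true", switch to publishing:
    config = client.publish(monitoring_data_points=[point], ikey=ikey)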
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = []  # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+    """Do not remove from this file.
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = []  # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+    """Do not remove from this file.
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_serialization.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_serialization.py
new file mode 100644
index 00000000..2f781d74
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_serialization.py
@@ -0,0 +1,1998 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# pylint: skip-file
+# pyright: reportUnnecessaryTypeIgnoreComment=false
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+from typing import (
+    Dict,
+    Any,
+    cast,
+    Optional,
+    Union,
+    AnyStr,
+    IO,
+    Mapping,
+    Callable,
+    TypeVar,
+    MutableMapping,
+    Type,
+    List,
+)
+
+try:
+    from urllib import quote  # type: ignore
+except ImportError:
+    from urllib.parse import quote
+import xml.etree.ElementTree as ET
+
+import isodate  # type: ignore
+
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+ModelType = TypeVar("ModelType", bound="Model")
+JSON = MutableMapping[str, Any]
+
+
+class RawDeserializer:
+
+    # Accept "text" because we're open minded people...
+    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
+
+    # Name used in context
+    CONTEXT_NAME = "deserialized_data"
+
+    @classmethod
+    def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
+        """Decode data according to content-type.
+
+        Accept a stream of data as well, but it will be loaded into memory at once for now.
+
+        If no content-type, will return the string version (not bytes, not stream)
+
+        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
+        :type data: str or bytes or IO
+        :param str content_type: The content type.
+        """
+        if hasattr(data, "read"):
+            # Assume a stream
+            data = cast(IO, data).read()
+
+        if isinstance(data, bytes):
+            data_as_str = data.decode(encoding="utf-8-sig")
+        else:
+            # Explain to mypy the correct type.
+            data_as_str = cast(str, data)
+
+            # Remove Byte Order Mark if present in string
+            data_as_str = data_as_str.lstrip(_BOM)
+
+        if content_type is None:
+            return data
+
+        if cls.JSON_REGEXP.match(content_type):
+            try:
+                return json.loads(data_as_str)
+            except ValueError as err:
+                raise DeserializationError("JSON is invalid: {}".format(err), err)
+        elif "xml" in (content_type or []):
+            try:
+
+                try:
+                    if isinstance(data, unicode):  # type: ignore
+                        # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
+                        data_as_str = data_as_str.encode(encoding="utf-8")  # type: ignore
+                except NameError:
+                    pass
+
+                return ET.fromstring(data_as_str)  # nosec
+            except ET.ParseError as err:
+                # It might be because the server has an issue, and returned JSON with
+                # content-type XML....
+                # So let's try a JSON load, and if it's still broken
+                # let's flow the initial exception
+                def _json_attempt(data):
+                    try:
+                        return True, json.loads(data)
+                    except ValueError:
+                        return False, None  # Don't care about this one
+
+                success, json_result = _json_attempt(data)
+                if success:
+                    return json_result
+                # If i'm here, it's not JSON, it's not XML, let's scream
+                # and raise the last context in this block (the XML exception)
+                # The function hack is because Py2.7 messes up with exception
+                # context otherwise.
+                _LOGGER.critical("Wasn't XML not JSON, failing")
+                raise DeserializationError("XML is invalid") from err
+        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
+
+    @classmethod
+    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
+        """Deserialize from HTTP response.
+
+        Use bytes and headers to NOT use any requests/aiohttp or whatever
+        specific implementation.
+        Headers will be tested for "content-type".
+        """
+        # Try to use content-type from headers if available
+        content_type = None
+        if "content-type" in headers:
+            content_type = headers["content-type"].split(";")[0].strip().lower()
+        # Ouch, this server did not declare what it sent...
+        # Let's guess it's JSON...
+        # Also, since Autorest was considering that an empty body was a valid JSON,
+        # need that test as well....
+        else:
+            content_type = "application/json"
+
+        if body_bytes:
+            return cls.deserialize_from_text(body_bytes, content_type)
+        return None
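+
+    # e.g. headers {"content-type": "application/json; charset=utf-8"} with body
+    # b'{"a": 1}' deserializes to {"a": 1}; an empty body yields None regardless.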
+
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    _long_type = long  # type: ignore
+except NameError:
+    _long_type = int
+
+
+class UTC(datetime.tzinfo):
+    """Time Zone info for handling UTC"""
+
+    def utcoffset(self, dt):
+        """UTF offset for UTC is 0."""
+        return datetime.timedelta(0)
+
+    def tzname(self, dt):
+        """Timestamp representation."""
+        return "Z"
+
+    def dst(self, dt):
+        """No daylight saving for UTC."""
+        return datetime.timedelta(0)
+
+
+try:
+    from datetime import timezone as _FixedOffset  # type: ignore
+except ImportError:  # Python 2.7
+
+    class _FixedOffset(datetime.tzinfo):  # type: ignore
+        """Fixed offset in minutes east from UTC.
+        Copy/pasted from Python doc
+        :param datetime.timedelta offset: offset in timedelta format
+        """
+
+        def __init__(self, offset):
+            self.__offset = offset
+
+        def utcoffset(self, dt):
+            return self.__offset
+
+        def tzname(self, dt):
+            return str(self.__offset.total_seconds() / 3600)
+
+        def __repr__(self):
+            return "<FixedOffset {}>".format(self.tzname(None))
+
+        def dst(self, dt):
+            return datetime.timedelta(0)
+
+        def __getinitargs__(self):
+            return (self.__offset,)
+
+
+try:
+    from datetime import timezone
+
+    TZ_UTC = timezone.utc
+except ImportError:
+    TZ_UTC = UTC()  # type: ignore
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+def attribute_transformer(key, attr_desc, value):
+    """A key transformer that returns the Python attribute.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A key using attribute name
+    """
+    return (key, value)
+
+
+def full_restapi_key_transformer(key, attr_desc, value):
+    """A key transformer that returns the full RestAPI key path.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A list of keys using RestAPI syntax.
+    """
+    keys = _FLATTEN.split(attr_desc["key"])
+    return ([_decode_attribute_map_key(k) for k in keys], value)
+
+
+def last_restapi_key_transformer(key, attr_desc, value):
+    """A key transformer that returns the last RestAPI key.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: The last RestAPI key.
+    """
+    key, value = full_restapi_key_transformer(key, attr_desc, value)
+    return (key[-1], value)
+
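+# Worked example for the three transformers above: an attribute mapped as
+# {"key": "properties.displayName", "type": "str"} under the Python name
+# "display_name" yields
+#   attribute_transformer        -> ("display_name", value)
+#   full_restapi_key_transformer -> (["properties", "displayName"], value)
+#   last_restapi_key_transformer -> ("displayName", value)
+# (_FLATTEN splits only on unescaped dots, so an escaped "odata\\.type" stays whole.)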
+
+def _create_xml_node(tag, prefix=None, ns=None):
+    """Create a XML node."""
+    if prefix and ns:
+        ET.register_namespace(prefix, ns)
+    if ns:
+        return ET.Element("{" + ns + "}" + tag)
+    else:
+        return ET.Element(tag)
+
+
+class Model(object):
+    """Mixin for all client request body/response body models to support
+    serialization and deserialization.
+    """
+
+    _subtype_map: Dict[str, Dict[str, Any]] = {}
+    _attribute_map: Dict[str, Dict[str, Any]] = {}
+    _validation: Dict[str, Dict[str, Any]] = {}
+
+    def __init__(self, **kwargs: Any) -> None:
+        self.additional_properties: Optional[Dict[str, Any]] = {}
+        for k in kwargs:
+            if k not in self._attribute_map:
+                _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+            elif k in self._validation and self._validation[k].get("readonly", False):
+                _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+            else:
+                setattr(self, k, kwargs[k])
+
+    def __eq__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes."""
+        if isinstance(other, self.__class__):
+            return self.__dict__ == other.__dict__
+        return False
+
+    def __ne__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes."""
+        return not self.__eq__(other)
+
+    def __str__(self) -> str:
+        return str(self.__dict__)
+
+    @classmethod
+    def enable_additional_properties_sending(cls) -> None:
+        cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
+    @classmethod
+    def is_xml_model(cls) -> bool:
+        try:
+            cls._xml_map  # type: ignore
+        except AttributeError:
+            return False
+        return True
+
+    @classmethod
+    def _create_xml_node(cls):
+        """Create XML node."""
+        try:
+            xml_map = cls._xml_map  # type: ignore
+        except AttributeError:
+            xml_map = {}
+
+        return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
+    def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+        """Return the JSON that would be sent to server from this model.
+
+        This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
+
+        If you want XML serialization, you can pass the kwarg is_xml=True.
+
+        :param bool keep_readonly: If you want to serialize the readonly attributes
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+        serializer = Serializer(self._infer_class_models())
+        return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs)  # type: ignore
+
+    def as_dict(
+        self,
+        keep_readonly: bool = True,
+        key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+        **kwargs: Any
+    ) -> JSON:
+        """Return a dict that can be serialized using json.dump.
+
+        Advanced usage might optionally use a callback as a parameter:
+
+        .. code:: python
+
+            def my_key_transformer(key, attr_desc, value):
+                return key
+
+        Key is the attribute name used in Python. Attr_desc
+        is a dict of metadata. Currently contains 'type' with the
+        msrest type and 'key' with the RestAPI encoded key.
+        Value is the current value in this object.
+
+        The string returned will be used to serialize the key.
+        If the return type is a list, this is considered hierarchical
+        result dict.
+
+        See the three examples in this file:
+
+        - attribute_transformer
+        - full_restapi_key_transformer
+        - last_restapi_key_transformer
+
+        If you want XML serialization, you can pass the kwarg is_xml=True.
+
+        :param function key_transformer: A key transformer function.
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+        serializer = Serializer(self._infer_class_models())
+        return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs)  # type: ignore
+
+    @classmethod
+    def _infer_class_models(cls):
+        try:
+            str_models = cls.__module__.rsplit(".", 1)[0]
+            models = sys.modules[str_models]
+            client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+            if cls.__name__ not in client_models:
+                raise ValueError("Not Autorest generated code")
+        except Exception:
+            # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
+            client_models = {cls.__name__: cls}
+        return client_models
+
+    @classmethod
+    def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType:
+        """Parse a str using the RestAPI syntax and return a model.
+
+        :param str data: A str using RestAPI structure. JSON by default.
+        :param str content_type: JSON by default, set application/xml if XML.
+        :returns: An instance of this model
+        :raises: DeserializationError if something went wrong
+        """
+        deserializer = Deserializer(cls._infer_class_models())
+        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
+    @classmethod
+    def from_dict(
+        cls: Type[ModelType],
+        data: Any,
+        key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+        content_type: Optional[str] = None,
+    ) -> ModelType:
+        """Parse a dict using given key extractor return a model.
+
+        By default, consider the key extractors
+        (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor
+        and last_rest_key_case_insensitive_extractor).
+
+        :param dict data: A dict using RestAPI structure
+        :param str content_type: JSON by default, set application/xml if XML.
+        :returns: An instance of this model
+        :raises: DeserializationError if something went wrong
+        """
+        deserializer = Deserializer(cls._infer_class_models())
+        deserializer.key_extractors = (  # type: ignore
+            [  # type: ignore
+                attribute_key_case_insensitive_extractor,
+                rest_key_case_insensitive_extractor,
+                last_rest_key_case_insensitive_extractor,
+            ]
+            if key_extractors is None
+            else key_extractors
+        )
+        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
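+    # Round-trip sketch: for a subclass with
+    #   _attribute_map = {"display_name": {"key": "properties.displayName", "type": "str"}}
+    # my_model.serialize() emits {"properties": {"displayName": ...}}, and
+    # MyModel.from_dict({"properties": {"displayName": ...}}) rebuilds the model.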
+    @classmethod
+    def _flatten_subtype(cls, key, objects):
+        if "_subtype_map" not in cls.__dict__:
+            return {}
+        result = dict(cls._subtype_map[key])
+        for valuetype in cls._subtype_map[key].values():
+            result.update(objects[valuetype]._flatten_subtype(key, objects))
+        return result
+
+    @classmethod
+    def _classify(cls, response, objects):
+        """Check the class _subtype_map for any child classes.
+        We want to ignore any inherited _subtype_maps.
+        Remove the polymorphic key from the initial data.
+        """
+        for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
+            subtype_value = None
+
+            if not isinstance(response, ET.Element):
+                rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
+                subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+            else:
+                subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
+            if subtype_value:
+                # Try to match base class. Can be class name only
+                # (bug to fix in Autorest to support x-ms-discriminator-name)
+                if cls.__name__ == subtype_value:
+                    return cls
+                flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
+                try:
+                    return objects[flatten_mapping_type[subtype_value]]  # type: ignore
+                except KeyError:
+                    _LOGGER.warning(
+                        "Subtype value %s has no mapping, use base class %s.",
+                        subtype_value,
+                        cls.__name__,
+                    )
+                    break
+            else:
+                _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
+                break
+        return cls
+
+    @classmethod
+    def _get_rest_key_parts(cls, attr_key):
+        """Get the RestAPI key of this attr, split it and decode part
+        :param str attr_key: Attribute key must be in attribute_map.
+        :returns: A list of RestAPI part
+        :rtype: list
+        """
+        rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
+        return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
+
+
+def _decode_attribute_map_key(key):
+    """This decode a key in an _attribute_map to the actual key we want to look at
+    inside the received data.
+
+    :param str key: A key string from the generated code
+    """
+    return key.replace("\\.", ".")
+
+
+class Serializer(object):
+    """Request object model serializer."""
+
+    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+    _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
+    days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
+    months = {
+        1: "Jan",
+        2: "Feb",
+        3: "Mar",
+        4: "Apr",
+        5: "May",
+        6: "Jun",
+        7: "Jul",
+        8: "Aug",
+        9: "Sep",
+        10: "Oct",
+        11: "Nov",
+        12: "Dec",
+    }
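+    # Each entry returns True when the value VIOLATES the rule; callers treat a
+    # truthy result as a validation failure.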
+    validation = {
+        "min_length": lambda x, y: len(x) < y,
+        "max_length": lambda x, y: len(x) > y,
+        "minimum": lambda x, y: x < y,
+        "maximum": lambda x, y: x > y,
+        "minimum_ex": lambda x, y: x <= y,
+        "maximum_ex": lambda x, y: x >= y,
+        "min_items": lambda x, y: len(x) < y,
+        "max_items": lambda x, y: len(x) > y,
+        "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
+        "unique": lambda x, y: len(x) != len(set(x)),
+        "multiple": lambda x, y: x % y != 0,
+    }
+
+    def __init__(self, classes: Optional[Mapping[str, type]] = None):
+        self.serialize_type = {
+            "iso-8601": Serializer.serialize_iso,
+            "rfc-1123": Serializer.serialize_rfc,
+            "unix-time": Serializer.serialize_unix,
+            "duration": Serializer.serialize_duration,
+            "date": Serializer.serialize_date,
+            "time": Serializer.serialize_time,
+            "decimal": Serializer.serialize_decimal,
+            "long": Serializer.serialize_long,
+            "bytearray": Serializer.serialize_bytearray,
+            "base64": Serializer.serialize_base64,
+            "object": self.serialize_object,
+            "[]": self.serialize_iter,
+            "{}": self.serialize_dict,
+        }
+        self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+        self.key_transformer = full_restapi_key_transformer
+        self.client_side_validation = True
+
+    def _serialize(self, target_obj, data_type=None, **kwargs):
+        """Serialize data into a string according to type.
+
+        :param target_obj: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :rtype: str, dict
+        :raises: SerializationError if serialization fails.
+        """
+        key_transformer = kwargs.get("key_transformer", self.key_transformer)
+        keep_readonly = kwargs.get("keep_readonly", False)
+        if target_obj is None:
+            return None
+
+        attr_name = None
+        class_name = target_obj.__class__.__name__
+
+        if data_type:
+            return self.serialize_data(target_obj, data_type, **kwargs)
+
+        if not hasattr(target_obj, "_attribute_map"):
+            data_type = type(target_obj).__name__
+            if data_type in self.basic_types.values():
+                return self.serialize_data(target_obj, data_type, **kwargs)
+
+        # Force "is_xml" kwargs if we detect a XML model
+        try:
+            is_xml_model_serialization = kwargs["is_xml"]
+        except KeyError:
+            is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model())
+
+        serialized = {}
+        if is_xml_model_serialization:
+            serialized = target_obj._create_xml_node()
+        try:
+            attributes = target_obj._attribute_map
+            for attr, attr_desc in attributes.items():
+                attr_name = attr
+                if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+                    continue
+
+                if attr_name == "additional_properties" and attr_desc["key"] == "":
+                    if target_obj.additional_properties is not None:
+                        serialized.update(target_obj.additional_properties)
+                    continue
+                try:
+
+                    orig_attr = getattr(target_obj, attr)
+                    if is_xml_model_serialization:
+                        pass  # Don't provide "transformer" for XML for now. Keep "orig_attr"
+                    else:  # JSON
+                        keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr)
+                        keys = keys if isinstance(keys, list) else [keys]
+
+                    kwargs["serialization_ctxt"] = attr_desc
+                    new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs)
+
+                    if is_xml_model_serialization:
+                        xml_desc = attr_desc.get("xml", {})
+                        xml_name = xml_desc.get("name", attr_desc["key"])
+                        xml_prefix = xml_desc.get("prefix", None)
+                        xml_ns = xml_desc.get("ns", None)
+                        if xml_desc.get("attr", False):
+                            if xml_ns:
+                                ET.register_namespace(xml_prefix, xml_ns)
+                                xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+                            serialized.set(xml_name, new_attr)  # type: ignore
+                            continue
+                        if xml_desc.get("text", False):
+                            serialized.text = new_attr  # type: ignore
+                            continue
+                        if isinstance(new_attr, list):
+                            serialized.extend(new_attr)  # type: ignore
+                        elif isinstance(new_attr, ET.Element):
+                            # If the nested XML node has no xml "name" mapping, we MUST replace the tag with the local tag, while keeping the namespaces.
+                            if "name" not in getattr(orig_attr, "_xml_map", {}):
+                                splitted_tag = new_attr.tag.split("}")
+                                if len(splitted_tag) == 2:  # Namespace
+                                    new_attr.tag = "}".join([splitted_tag[0], xml_name])
+                                else:
+                                    new_attr.tag = xml_name
+                            serialized.append(new_attr)  # type: ignore
+                        else:  # That's a basic type
+                            # Integrate namespace if necessary
+                            local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
+                            local_node.text = str(new_attr)
+                            serialized.append(local_node)  # type: ignore
+                    else:  # JSON
+                        for k in reversed(keys):  # type: ignore
+                            new_attr = {k: new_attr}
+
+                        _new_attr = new_attr
+                        _serialized = serialized
+                        for k in keys:  # type: ignore
+                            if k not in _serialized:
+                                _serialized.update(_new_attr)  # type: ignore
+                            _new_attr = _new_attr[k]  # type: ignore
+                            _serialized = _serialized[k]
+                except ValueError as err:
+                    if isinstance(err, SerializationError):
+                        raise
+
+        except (AttributeError, KeyError, TypeError) as err:
+            msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
+            raise SerializationError(msg) from err
+        else:
+            return serialized
+
+    def body(self, data, data_type, **kwargs):
+        """Serialize data intended for a request body.
+
+        :param data: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :rtype: dict
+        :raises: SerializationError if serialization fails.
+        :raises: ValueError if data is None
+        """
+
+        # Just in case this is a dict
+        internal_data_type_str = data_type.strip("[]{}")
+        internal_data_type = self.dependencies.get(internal_data_type_str, None)
+        try:
+            is_xml_model_serialization = kwargs["is_xml"]
+        except KeyError:
+            if internal_data_type and issubclass(internal_data_type, Model):
+                is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model())
+            else:
+                is_xml_model_serialization = False
+        if internal_data_type and not isinstance(internal_data_type, Enum):
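+            # Accept plain dicts for model-typed bodies: round-trip the input
+            # through a Deserializer so _serialize() always receives a Model.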
+            try:
+                deserializer = Deserializer(self.dependencies)
+                # Since this is the serialization path, the format is almost surely
+                # not REST JSON; we can't handle additional properties for now.
+                deserializer.additional_properties_detection = False
+                if is_xml_model_serialization:
+                    deserializer.key_extractors = [  # type: ignore
+                        attribute_key_case_insensitive_extractor,
+                    ]
+                else:
+                    deserializer.key_extractors = [
+                        rest_key_case_insensitive_extractor,
+                        attribute_key_case_insensitive_extractor,
+                        last_rest_key_case_insensitive_extractor,
+                    ]
+                data = deserializer._deserialize(data_type, data)
+            except DeserializationError as err:
+                raise SerializationError("Unable to build a model: " + str(err)) from err
+
+        return self._serialize(data, data_type, **kwargs)
+
+    def url(self, name, data, data_type, **kwargs):
+        """Serialize data intended for a URL path.
+
+        :param data: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :rtype: str
+        :raises: TypeError if serialization fails.
+        :raises: ValueError if data is None
+        """
+        try:
+            output = self.serialize_data(data, data_type, **kwargs)
+            if data_type == "bool":
+                output = json.dumps(output)
+
+            if kwargs.get("skip_quote") is True:
+                output = str(output)
+                output = output.replace("{", quote("{")).replace("}", quote("}"))
+            else:
+                output = quote(str(output), safe="")
+        except SerializationError:
+            raise TypeError("{} must be type {}.".format(name, data_type))
+        else:
+            return output
+
+    def query(self, name, data, data_type, **kwargs):
+        """Serialize data intended for a URL query.
+
+        :param data: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :keyword bool skip_quote: Whether to skip quoting the serialized result.
+        Defaults to False.
+        :rtype: str, list
+        :raises: TypeError if serialization fails.
+        :raises: ValueError if data is None
+        """
+        try:
+            # Treat the list aside, since we don't want to encode the div separator
+            if data_type.startswith("["):
+                internal_data_type = data_type[1:-1]
+                do_quote = not kwargs.get("skip_quote", False)
+                return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
+
+            # Not a list, regular serialization
+            output = self.serialize_data(data, data_type, **kwargs)
+            if data_type == "bool":
+                output = json.dumps(output)
+            if kwargs.get("skip_quote") is True:
+                output = str(output)
+            else:
+                output = quote(str(output), safe="")
+        except SerializationError:
+            raise TypeError("{} must be type {}.".format(name, data_type))
+        else:
+            return str(output)
+
+    def header(self, name, data, data_type, **kwargs):
+        """Serialize data intended for a request header.
+
+        :param data: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :rtype: str
+        :raises: TypeError if serialization fails.
+        :raises: ValueError if data is None
+        """
+        try:
+            if data_type in ["[str]"]:
+                data = ["" if d is None else d for d in data]
+
+            output = self.serialize_data(data, data_type, **kwargs)
+            if data_type == "bool":
+                output = json.dumps(output)
+        except SerializationError:
+            raise TypeError("{} must be type {}.".format(name, data_type))
+        else:
+            return str(output)
+
+    def serialize_data(self, data, data_type, **kwargs):
+        """Serialize generic data according to supplied data type.
+
+        :param data: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :raises: ValueError if data is None
+        :raises: SerializationError if serialization fails.
+        """
+        if data is None:
+            raise ValueError("No value for given attribute")
+
+        try:
+            if data is CoreNull:
+                return None
+            if data_type in self.basic_types.values():
+                return self.serialize_basic(data, data_type, **kwargs)
+
+            elif data_type in self.serialize_type:
+                return self.serialize_type[data_type](data, **kwargs)
+
+            # If dependencies is empty, try with current data class
+            # It has to be a subclass of Enum anyway
+            enum_type = self.dependencies.get(data_type, data.__class__)
+            if issubclass(enum_type, Enum):
+                return Serializer.serialize_enum(data, enum_obj=enum_type)
+
+            iter_type = data_type[0] + data_type[-1]
+            if iter_type in self.serialize_type:
+                return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
+
+        except (ValueError, TypeError) as err:
+            msg = "Unable to serialize value: {!r} as type: {!r}."
+            raise SerializationError(msg.format(data, data_type)) from err
+        else:
+            return self._serialize(data, **kwargs)
+
+    @classmethod
+    def _get_custom_serializers(cls, data_type, **kwargs):
+        custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+        if custom_serializer:
+            return custom_serializer
+        if kwargs.get("is_xml", False):
+            return cls._xml_basic_types_serializers.get(data_type)
+
+    @classmethod
+    def serialize_basic(cls, data, data_type, **kwargs):
+        """Serialize basic builting data type.
+        Serializes objects to str, int, float or bool.
+
+        Possible kwargs:
+        - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+        - is_xml bool : If set, use xml_basic_types_serializers
+
+        :param data: Object to be serialized.
+        :param str data_type: Type of the object.
+        """
+        custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+        if custom_serializer:
+            return custom_serializer(data)
+        if data_type == "str":
+            return cls.serialize_unicode(data)
+        return eval(data_type)(data)  # nosec
+
+    @classmethod
+    def serialize_unicode(cls, data):
+        """Special handling for serializing unicode strings in Py2.
+        Encode to UTF-8 if unicode, otherwise handle as a str.
+
+        :param data: Object to be serialized.
+        :rtype: str
+        """
+        try:  # If I received an enum, return its value
+            return data.value
+        except AttributeError:
+            pass
+
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                # Don't change it, JSON and XML ElementTree are totally able
+                # to serialize correctly u'' strings
+                return data
+        except NameError:
+            return str(data)
+        else:
+            return str(data)
+
+    def serialize_iter(self, data, iter_type, div=None, **kwargs):
+        """Serialize iterable.
+
+        Supported kwargs:
+        - serialization_ctxt dict : The current entry of _attribute_map, or same format.
+          serialization_ctxt['type'] should be same as data_type.
+        - is_xml bool : If set, serialize as XML
+
+        :param list data: Object to be serialized.
+        :param str iter_type: Type of object in the iterable.
+        :param str div: If set, this str will be used to combine the elements
+         in the iterable into a combined string. Default is 'None'.
+        :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
+        Defaults to False.
+        :rtype: list, str
+        """
+        if isinstance(data, str):
+            raise SerializationError("Refuse str type as a valid iter type.")
+
+        serialization_ctxt = kwargs.get("serialization_ctxt", {})
+        is_xml = kwargs.get("is_xml", False)
+
+        serialized = []
+        for d in data:
+            try:
+                serialized.append(self.serialize_data(d, iter_type, **kwargs))
+            except ValueError as err:
+                if isinstance(err, SerializationError):
+                    raise
+                serialized.append(None)
+
+        if kwargs.get("do_quote", False):
+            serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
+
+        if div:
+            serialized = ["" if s is None else str(s) for s in serialized]
+            serialized = div.join(serialized)
+
+        if "xml" in serialization_ctxt or is_xml:
+            # XML serialization is more complicated
+            xml_desc = serialization_ctxt.get("xml", {})
+            xml_name = xml_desc.get("name")
+            if not xml_name:
+                xml_name = serialization_ctxt["key"]
+
+            # Create a wrap node if necessary (use the fact that Element and list have "append")
+            is_wrapped = xml_desc.get("wrapped", False)
+            node_name = xml_desc.get("itemsName", xml_name)
+            if is_wrapped:
+                final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+            else:
+                final_result = []
+            # All list elements to "local_node"
+            for el in serialized:
+                if isinstance(el, ET.Element):
+                    el_node = el
+                else:
+                    el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+                    if el is not None:  # Otherwise it writes "None" :-p
+                        el_node.text = str(el)
+                final_result.append(el_node)
+            return final_result
+        return serialized
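+
+    # Illustrative usage (not part of the vendored file):
+    #
+    #     Serializer().serialize_iter(["a b", "c"], "str", div=",", do_quote=True)
+    #     # -> "a%20b,c": each element is percent-quoted, then joined on div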
+
+    def serialize_dict(self, attr, dict_type, **kwargs):
+        """Serialize a dictionary of objects.
+
+        :param dict attr: Object to be serialized.
+        :param str dict_type: Type of object in the dictionary.
+        :rtype: dict
+        """
+        serialization_ctxt = kwargs.get("serialization_ctxt", {})
+        serialized = {}
+        for key, value in attr.items():
+            try:
+                serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
+            except ValueError as err:
+                if isinstance(err, SerializationError):
+                    raise
+                serialized[self.serialize_unicode(key)] = None
+
+        if "xml" in serialization_ctxt:
+            # XML serialization is more complicated
+            xml_desc = serialization_ctxt["xml"]
+            xml_name = xml_desc["name"]
+
+            final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+            for key, value in serialized.items():
+                ET.SubElement(final_result, key).text = value
+            return final_result
+
+        return serialized
+
+    def serialize_object(self, attr, **kwargs):
+        """Serialize a generic object.
+        This will be handled as a dictionary. If object passed in is not
+        a basic type (str, int, float, dict, list) it will simply be
+        cast to str.
+
+        :param attr: Object to be serialized.
+        :rtype: dict or str
+        """
+        if attr is None:
+            return None
+        if isinstance(attr, ET.Element):
+            return attr
+        obj_type = type(attr)
+        if obj_type in self.basic_types:
+            return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
+        if obj_type is _long_type:
+            return self.serialize_long(attr)
+        if obj_type is str:
+            return self.serialize_unicode(attr)
+        if obj_type is datetime.datetime:
+            return self.serialize_iso(attr)
+        if obj_type is datetime.date:
+            return self.serialize_date(attr)
+        if obj_type is datetime.time:
+            return self.serialize_time(attr)
+        if obj_type is datetime.timedelta:
+            return self.serialize_duration(attr)
+        if obj_type is decimal.Decimal:
+            return self.serialize_decimal(attr)
+
+        # If it's a model or I know this dependency, serialize as a Model
+        elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+            return self._serialize(attr)
+
+        if obj_type == dict:
+            serialized = {}
+            for key, value in attr.items():
+                try:
+                    serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs)
+                except ValueError:
+                    serialized[self.serialize_unicode(key)] = None
+            return serialized
+
+        if obj_type == list:
+            serialized = []
+            for obj in attr:
+                try:
+                    serialized.append(self.serialize_object(obj, **kwargs))
+                except ValueError:
+                    pass
+            return serialized
+        return str(attr)
+
+    @staticmethod
+    def serialize_enum(attr, enum_obj=None):
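+        """Serialize an enum, or a raw value, against a given enum type.
+
+        The value is matched by enum value first, then case-insensitively.
+
+        :param attr: Enum member or raw value to be serialized.
+        :param enum_obj: Enum type to validate against.
+        :rtype: str
+        :raises: SerializationError if the value cannot be matched to the enum.
+        """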
+        try:
+            result = attr.value
+        except AttributeError:
+            result = attr
+        try:
+            enum_obj(result)  # type: ignore
+            return result
+        except ValueError:
+            for enum_value in enum_obj:  # type: ignore
+                if enum_value.value.lower() == str(attr).lower():
+                    return enum_value.value
+            error = "{!r} is not valid value for enum {!r}"
+            raise SerializationError(error.format(attr, enum_obj))
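+
+    # Illustrative usage (not part of the vendored file), assuming a
+    # hypothetical ``class Color(Enum): RED = "Red"``:
+    #
+    #     Serializer.serialize_enum("red", enum_obj=Color)      # -> "Red"
+    #     Serializer.serialize_enum(Color.RED, enum_obj=Color)  # -> "Red"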
+
+    @staticmethod
+    def serialize_bytearray(attr, **kwargs):
+        """Serialize bytearray into base-64 string.
+
+        :param attr: Object to be serialized.
+        :rtype: str
+        """
+        return b64encode(attr).decode()
+
+    @staticmethod
+    def serialize_base64(attr, **kwargs):
+        """Serialize str into base-64 string.
+
+        :param attr: Object to be serialized.
+        :rtype: str
+        """
+        encoded = b64encode(attr).decode("ascii")
+        return encoded.strip("=").replace("+", "-").replace("/", "_")
+
+    @staticmethod
+    def serialize_decimal(attr, **kwargs):
+        """Serialize Decimal object to float.
+
+        :param attr: Object to be serialized.
+        :rtype: float
+        """
+        return float(attr)
+
+    @staticmethod
+    def serialize_long(attr, **kwargs):
+        """Serialize long (Py2) or int (Py3).
+
+        :param attr: Object to be serialized.
+        :rtype: int/long
+        """
+        return _long_type(attr)
+
+    @staticmethod
+    def serialize_date(attr, **kwargs):
+        """Serialize Date object into ISO-8601 formatted string.
+
+        :param Date attr: Object to be serialized.
+        :rtype: str
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_date(attr)
+        t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
+        return t
+
+    @staticmethod
+    def serialize_time(attr, **kwargs):
+        """Serialize Time object into ISO-8601 formatted string.
+
+        :param datetime.time attr: Object to be serialized.
+        :rtype: str
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_time(attr)
+        t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+        if attr.microsecond:
+            t += ".{:02}".format(attr.microsecond)
+        return t
+
+    @staticmethod
+    def serialize_duration(attr, **kwargs):
+        """Serialize TimeDelta object into ISO-8601 formatted string.
+
+        :param TimeDelta attr: Object to be serialized.
+        :rtype: str
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_duration(attr)
+        return isodate.duration_isoformat(attr)
+
+    @staticmethod
+    def serialize_rfc(attr, **kwargs):
+        """Serialize Datetime object into RFC-1123 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises: TypeError if format invalid.
+        """
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+        except AttributeError as exc:
+            raise TypeError("RFC1123 object must be valid Datetime object.") from exc
+
+        return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+            Serializer.days[utc.tm_wday],
+            utc.tm_mday,
+            Serializer.months[utc.tm_mon],
+            utc.tm_year,
+            utc.tm_hour,
+            utc.tm_min,
+            utc.tm_sec,
+        )
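+
+    # Illustrative usage (not part of the vendored file), assuming the
+    # day/month lookup tables defined earlier on Serializer:
+    #
+    #     Serializer.serialize_rfc(datetime.datetime(2024, 1, 2, tzinfo=datetime.timezone.utc))
+    #     # -> "Tue, 02 Jan 2024 00:00:00 GMT"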
+
+    @staticmethod
+    def serialize_iso(attr, **kwargs):
+        """Serialize Datetime object into ISO-8601 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises: SerializationError if format invalid.
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_datetime(attr)
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+            if utc.tm_year > 9999 or utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+
+            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+            if microseconds:
+                microseconds = "." + microseconds
+            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+                utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+            )
+            return date + microseconds + "Z"
+        except (ValueError, OverflowError) as err:
+            msg = "Unable to serialize datetime object."
+            raise SerializationError(msg) from err
+        except AttributeError as err:
+            msg = "ISO-8601 object must be valid Datetime object."
+            raise TypeError(msg) from err
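+
+    # Illustrative usage (not part of the vendored file):
+    #
+    #     Serializer.serialize_iso(datetime.datetime(2024, 1, 2, 3, 4, 5, tzinfo=datetime.timezone.utc))
+    #     # -> "2024-01-02T03:04:05.000Z" (a fractional part is always emitted)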
+
+    @staticmethod
+    def serialize_unix(attr, **kwargs):
+        """Serialize Datetime object into IntTime format.
+        This is represented as seconds.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: int
+        :raises: SerializationError if format invalid
+        """
+        if isinstance(attr, int):
+            return attr
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            return int(calendar.timegm(attr.utctimetuple()))
+        except AttributeError as exc:
+            raise TypeError("Unix time object must be valid Datetime object.") from exc
+
+
+def rest_key_extractor(attr, attr_desc, data):
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        # Need the cast, as for some reasons "split" is typed as list[str | Any]
+        dict_keys = cast(List[str], _FLATTEN.split(key))
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = working_data.get(working_key, data)
+        if working_data is None:
+            # If at any point while following flatten JSON path see None, it means
+            # that all properties under are None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    return working_data.get(key)
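+
+
+# Illustrative behavior (not part of the vendored file): for an attribute
+# mapped as {"key": "properties.name", ...} and
+# data = {"properties": {"name": "foo"}}, rest_key_extractor walks the
+# flattened JSON path and returns "foo".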
+
+
+def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        dict_keys = _FLATTEN.split(key)
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+        if working_data is None:
+            # If at any point while following flatten JSON path see None, it means
+            # that all properties under are None as well
+            return None
+        key = ".".join(dict_keys[1:])
+
+    if working_data:
+        return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data):
+    """Extract the attribute in "data" based on the last part of the JSON path key."""
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    This is the case insensitive version of "last_rest_key_extractor"
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+    return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+    found_key = None
+    lower_attr = attr.lower()
+    for key in data:
+        if lower_attr == key.lower():
+            found_key = key
+            break
+
+    return data.get(found_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+    """Given an internal type XML description, extract correct XML name with namespace.
+
+    :param internal_type: A model class.
+    :rtype: str
+    :returns: The XML name, qualified with its namespace if one is declared.
+    """
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+    xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+    xml_ns = internal_type_xml_map.get("ns", None)
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+    return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):
+    if isinstance(data, dict):
+        return None
+
+    # Test if this model is XML ready first
+    if not isinstance(data, ET.Element):
+        return None
+
+    xml_desc = attr_desc.get("xml", {})
+    xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for children
+    is_iter_type = attr_desc["type"].startswith("[")
+    is_wrapped = xml_desc.get("wrapped", False)
+    internal_type = attr_desc.get("internalType", None)
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+    # Integrate namespace if necessary
+    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+    # If it's an attribute, that's simple
+    if xml_desc.get("attr", False):
+        return data.get(xml_name)
+
+    # If it's x-ms-text, that's simple too
+    if xml_desc.get("text", False):
+        return data.text
+
+    # Scenario where I take the local name:
+    # - Wrapped node
+    # - Internal type is an enum (considered basic types)
+    # - Internal type has no XML/Name node
+    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+        children = data.findall(xml_name)
+    # If internal type has a local name and it's not a list, I use that name
+    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+        xml_name = _extract_name_from_internal_type(internal_type)
+        children = data.findall(xml_name)
+    # That's an array
+    else:
+        if internal_type:  # Complex type, ignore itemsName and use the complex type name
+            items_name = _extract_name_from_internal_type(internal_type)
+        else:
+            items_name = xml_desc.get("itemsName", xml_name)
+        children = data.findall(items_name)
+
+    if len(children) == 0:
+        if is_iter_type:
+            if is_wrapped:
+                return None  # is_wrapped no node, we want None
+            else:
+                return []  # not wrapped, assume empty list
+        return None  # Assume it's not there, maybe an optional node.
+
+    # If is_iter_type and not wrapped, return all found children
+    if is_iter_type:
+        if not is_wrapped:
+            return children
+        else:  # Iter and wrapped, should have found one node only (the wrap one)
+            if len(children) != 1:
+                raise DeserializationError(
+                    "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
+                        xml_name
+                    )
+                )
+            return list(children[0])  # Might be empty list and that's ok.
+
+    # Here it's not an iter type: we should have found exactly one element
+    if len(children) > 1:
+        raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name))
+    return children[0]
+
+
+class Deserializer(object):
+    """Response object model deserializer.
+
+    :param dict classes: Class type dictionary for deserializing complex types.
+    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
+    """
+
+    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+    valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+
+    def __init__(self, classes: Optional[Mapping[str, type]] = None):
+        self.deserialize_type = {
+            "iso-8601": Deserializer.deserialize_iso,
+            "rfc-1123": Deserializer.deserialize_rfc,
+            "unix-time": Deserializer.deserialize_unix,
+            "duration": Deserializer.deserialize_duration,
+            "date": Deserializer.deserialize_date,
+            "time": Deserializer.deserialize_time,
+            "decimal": Deserializer.deserialize_decimal,
+            "long": Deserializer.deserialize_long,
+            "bytearray": Deserializer.deserialize_bytearray,
+            "base64": Deserializer.deserialize_base64,
+            "object": self.deserialize_object,
+            "[]": self.deserialize_iter,
+            "{}": self.deserialize_dict,
+        }
+        self.deserialize_expected_types = {
+            "duration": (isodate.Duration, datetime.timedelta),
+            "iso-8601": (datetime.datetime),
+        }
+        self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+        self.key_extractors = [rest_key_extractor, xml_key_extractor]
+        # Additional properties only work if "rest_key_extractor" is used to
+        # extract the keys. Making this work with any key extractor would be too
+        # complicated, with no real scenario for now.
+        # So we add a flag to disable additional-properties detection. This flag
+        # should be used if you expect the deserialization to NOT come from a
+        # JSON REST syntax. Otherwise, results are unexpected.
+        self.additional_properties_detection = True
+
+    def __call__(self, target_obj, response_data, content_type=None):
+        """Call the deserializer to process a REST response.
+
+        :param str target_obj: Target data type to deserialize to.
+        :param requests.Response response_data: REST response object.
+        :param str content_type: Swagger "produces" if available.
+        :raises: DeserializationError if deserialization fails.
+        :return: Deserialized object.
+        """
+        data = self._unpack_content(response_data, content_type)
+        return self._deserialize(target_obj, data)
+
+    def _deserialize(self, target_obj, data):
+        """Call the deserializer on a model.
+
+        Data needs to be already deserialized as JSON or XML ElementTree
+
+        :param str target_obj: Target data type to deserialize to.
+        :param object data: Object to deserialize.
+        :raises: DeserializationError if deserialization fails.
+        :return: Deserialized object.
+        """
+        # This is already a model, go recursive just in case
+        if hasattr(data, "_attribute_map"):
+            constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
+            try:
+                for attr, mapconfig in data._attribute_map.items():
+                    if attr in constants:
+                        continue
+                    value = getattr(data, attr)
+                    if value is None:
+                        continue
+                    local_type = mapconfig["type"]
+                    internal_data_type = local_type.strip("[]{}")
+                    if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
+                        continue
+                    setattr(data, attr, self._deserialize(local_type, value))
+                return data
+            except AttributeError:
+                return
+
+        response, class_name = self._classify_target(target_obj, data)
+
+        if isinstance(response, str):
+            return self.deserialize_data(data, response)
+        elif isinstance(response, type) and issubclass(response, Enum):
+            return self.deserialize_enum(data, response)
+
+        if data is None:
+            return data
+        try:
+            attributes = response._attribute_map  # type: ignore
+            d_attrs = {}
+            for attr, attr_desc in attributes.items():
+                # Check empty string. If it's not empty, someone has a real "additionalProperties"...
+                if attr == "additional_properties" and attr_desc["key"] == "":
+                    continue
+                raw_value = None
+                # Enhance attr_desc with some dynamic data
+                attr_desc = attr_desc.copy()  # Do a copy, do not change the real one
+                internal_data_type = attr_desc["type"].strip("[]{}")
+                if internal_data_type in self.dependencies:
+                    attr_desc["internalType"] = self.dependencies[internal_data_type]
+
+                for key_extractor in self.key_extractors:
+                    found_value = key_extractor(attr, attr_desc, data)
+                    if found_value is not None:
+                        if raw_value is not None and raw_value != found_value:
+                            msg = (
+                                "Ignoring extracted value '%s' from %s for key '%s'"
+                                " (duplicate extraction, follow extractors order)"
+                            )
+                            _LOGGER.warning(msg, found_value, key_extractor, attr)
+                            continue
+                        raw_value = found_value
+
+                value = self.deserialize_data(raw_value, attr_desc["type"])
+                d_attrs[attr] = value
+        except (AttributeError, TypeError, KeyError) as err:
+            msg = "Unable to deserialize to object: " + class_name  # type: ignore
+            raise DeserializationError(msg) from err
+        else:
+            additional_properties = self._build_additional_properties(attributes, data)
+            return self._instantiate_model(response, d_attrs, additional_properties)
+
+    def _build_additional_properties(self, attribute_map, data):
+        if not self.additional_properties_detection:
+            return None
+        if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "":
+            # Check empty string. If it's not empty, someone has a real "additionalProperties"
+            return None
+        if isinstance(data, ET.Element):
+            data = {el.tag: el.text for el in data}
+
+        known_keys = {
+            _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
+            for desc in attribute_map.values()
+            if desc["key"] != ""
+        }
+        present_keys = set(data.keys())
+        missing_keys = present_keys - known_keys
+        return {key: data[key] for key in missing_keys}
+
+    def _classify_target(self, target, data):
+        """Check to see whether the deserialization target object can
+        be classified into a subclass.
+        Once classification has been determined, initialize object.
+
+        :param str target: The target object type to deserialize to.
+        :param str/dict data: The response data to deserialize.
+        """
+        if target is None:
+            return None, None
+
+        if isinstance(target, str):
+            try:
+                target = self.dependencies[target]
+            except KeyError:
+                return target, target
+
+        try:
+            target = target._classify(data, self.dependencies)  # type: ignore
+        except AttributeError:
+            pass  # Target is not a Model, no classify
+        return target, target.__class__.__name__  # type: ignore
+
+    def failsafe_deserialize(self, target_obj, data, content_type=None):
+        """Ignores any errors encountered in deserialization,
+        and falls back to not deserializing the object. Recommended
+        for use in error deserialization, as we want to return the
+        HttpResponseError to users, and not have them deal with
+        a deserialization error.
+
+        :param str target_obj: The target object type to deserialize to.
+        :param str/dict data: The response data to deserialize.
+        :param str content_type: Swagger "produces" if available.
+        """
+        try:
+            return self(target_obj, data, content_type=content_type)
+        except:
+            _LOGGER.debug(
+                "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+            )
+            return None
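+
+    # Illustrative usage (not part of the vendored file): typically used on
+    # error paths, e.g.
+    #
+    #     error = deserializer.failsafe_deserialize("HypotheticalError", response)
+    #     # -> None if the body cannot be deserialized, instead of raising
+    #
+    # where "HypotheticalError" stands for a model name registered in
+    # self.dependencies.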
+
+    @staticmethod
+    def _unpack_content(raw_data, content_type=None):
+        """Extract the correct structure for deserialization.
+
+        If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
+        If we can't, raise. Your Pipeline should have a RawDeserializer.
+
+        If not a pipeline response and raw_data is bytes or string, use content-type
+        to decode it. If no content-type, try JSON.
+
+        If raw_data is something else, bypass all logic and return it directly.
+
+        :param raw_data: Data to be processed.
+        :param content_type: How to parse if raw_data is a string/bytes.
+        :raises JSONDecodeError: If JSON is requested and parsing is impossible.
+        :raises UnicodeDecodeError: If bytes are not UTF-8.
+        """
+        # Assume this is enough to detect a Pipeline Response without importing it
+        context = getattr(raw_data, "context", {})
+        if context:
+            if RawDeserializer.CONTEXT_NAME in context:
+                return context[RawDeserializer.CONTEXT_NAME]
+            raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
+
+        # Assume this is enough to recognize universal_http.ClientResponse without importing it
+        if hasattr(raw_data, "body"):
+            return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
+
+        # Assume this enough to recognize requests.Response without importing it.
+        if hasattr(raw_data, "_content_consumed"):
+            return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
+
+        if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
+            return RawDeserializer.deserialize_from_text(raw_data, content_type)  # type: ignore
+        return raw_data
+
+    def _instantiate_model(self, response, attrs, additional_properties=None):
+        """Instantiate a response model passing in deserialized args.
+
+        :param response: The response model class.
+        :param attrs: The deserialized response attributes.
+        """
+        if callable(response):
+            subtype = getattr(response, "_subtype_map", {})
+            try:
+                readonly = [k for k, v in response._validation.items() if v.get("readonly")]
+                const = [k for k, v in response._validation.items() if v.get("constant")]
+                kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
+                response_obj = response(**kwargs)
+                for attr in readonly:
+                    setattr(response_obj, attr, attrs.get(attr))
+                if additional_properties:
+                    response_obj.additional_properties = additional_properties
+                return response_obj
+            except TypeError as err:
+                msg = "Unable to deserialize {} into model {}. ".format(kwargs, response)  # type: ignore
+                raise DeserializationError(msg + str(err)) from err
+        else:
+            try:
+                for attr, value in attrs.items():
+                    setattr(response, attr, value)
+                return response
+            except Exception as exp:
+                msg = "Unable to populate response model. "
+                msg += "Type: {}, Error: {}".format(type(response), exp)
+                raise DeserializationError(msg) from exp
+
+    def deserialize_data(self, data, data_type):
+        """Process data for deserialization according to data type.
+
+        :param str data: The response string to be deserialized.
+        :param str data_type: The type to deserialize to.
+        :raises: DeserializationError if deserialization fails.
+        :return: Deserialized object.
+        """
+        if data is None:
+            return data
+
+        try:
+            if not data_type:
+                return data
+            if data_type in self.basic_types.values():
+                return self.deserialize_basic(data, data_type)
+            if data_type in self.deserialize_type:
+                if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
+                    return data
+
+                is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+                if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
+                    return None
+                data_val = self.deserialize_type[data_type](data)
+                return data_val
+
+            iter_type = data_type[0] + data_type[-1]
+            if iter_type in self.deserialize_type:
+                return self.deserialize_type[iter_type](data, data_type[1:-1])
+
+            obj_type = self.dependencies[data_type]
+            if issubclass(obj_type, Enum):
+                if isinstance(data, ET.Element):
+                    data = data.text
+                return self.deserialize_enum(data, obj_type)
+
+        except (ValueError, TypeError, AttributeError) as err:
+            msg = "Unable to deserialize response data."
+            msg += " Data: {}, {}".format(data, data_type)
+            raise DeserializationError(msg) from err
+        else:
+            return self._deserialize(obj_type, data)
+
+    def deserialize_iter(self, attr, iter_type):
+        """Deserialize an iterable.
+
+        :param list attr: Iterable to be deserialized.
+        :param str iter_type: The type of object in the iterable.
+        :rtype: list
+        """
+        if attr is None:
+            return None
+        if isinstance(attr, ET.Element):  # If I receive an element here, get the children
+            attr = list(attr)
+        if not isinstance(attr, (list, set)):
+            raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
+        return [self.deserialize_data(a, iter_type) for a in attr]
+
+    def deserialize_dict(self, attr, dict_type):
+        """Deserialize a dictionary.
+
+        :param dict/list attr: Dictionary to be deserialized. Also accepts
+         a list of key, value pairs.
+        :param str dict_type: The object type of the items in the dictionary.
+        :rtype: dict
+        """
+        if isinstance(attr, list):
+            return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+        if isinstance(attr, ET.Element):
+            # Transform <Key>value</Key> into {"Key": "value"}
+            attr = {el.tag: el.text for el in attr}
+        return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
+
+    def deserialize_object(self, attr, **kwargs):
+        """Deserialize a generic object.
+        This will be handled as a dictionary.
+
+        :param dict attr: Dictionary to be deserialized.
+        :rtype: dict
+        :raises: TypeError if non-builtin datatype encountered.
+        """
+        if attr is None:
+            return None
+        if isinstance(attr, ET.Element):
+            # Do no recurse on XML, just return the tree as-is
+            return attr
+        if isinstance(attr, str):
+            return self.deserialize_basic(attr, "str")
+        obj_type = type(attr)
+        if obj_type in self.basic_types:
+            return self.deserialize_basic(attr, self.basic_types[obj_type])
+        if obj_type is _long_type:
+            return self.deserialize_long(attr)
+
+        if obj_type == dict:
+            deserialized = {}
+            for key, value in attr.items():
+                try:
+                    deserialized[key] = self.deserialize_object(value, **kwargs)
+                except ValueError:
+                    deserialized[key] = None
+            return deserialized
+
+        if obj_type == list:
+            deserialized = []
+            for obj in attr:
+                try:
+                    deserialized.append(self.deserialize_object(obj, **kwargs))
+                except ValueError:
+                    pass
+            return deserialized
+
+        else:
+            error = "Cannot deserialize generic object with type: "
+            raise TypeError(error + str(obj_type))
+
+    def deserialize_basic(self, attr, data_type):
+        """Deserialize basic builtin data type from string.
+        Will attempt to convert to str, int, float and bool.
+        This function will also accept '1', '0', 'true' and 'false' as
+        valid bool values.
+
+        :param str attr: response string to be deserialized.
+        :param str data_type: deserialization data type.
+        :rtype: str, int, float or bool
+        :raises: TypeError if string format is not valid.
+        """
+        # If we're here, data is supposed to be a basic type.
+        # If it's still an XML node, take the text
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+            if not attr:
+                if data_type == "str":
+                    # None or '', node <a/> is empty string.
+                    return ""
+                else:
+                    # None or '', node <a/> with a strong type is None.
+                    # Don't try to model "empty bool" or "empty int"
+                    return None
+
+        if data_type == "bool":
+            if attr in [True, False, 1, 0]:
+                return bool(attr)
+            elif isinstance(attr, str):
+                if attr.lower() in ["true", "1"]:
+                    return True
+                elif attr.lower() in ["false", "0"]:
+                    return False
+            raise TypeError("Invalid boolean value: {}".format(attr))
+
+        if data_type == "str":
+            return self.deserialize_unicode(attr)
+        return eval(data_type)(attr)  # nosec
+
+    @staticmethod
+    def deserialize_unicode(data):
+        """Preserve unicode objects in Python 2, otherwise return data
+        as a string.
+
+        :param str data: response string to be deserialized.
+        :rtype: str or unicode
+        """
+        # We might be here because we have an enum modeled as string,
+        # and we try to deserialize a partial dict with enum inside
+        if isinstance(data, Enum):
+            return data
+
+        # Consider this is real string
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                return data
+        except NameError:
+            return str(data)
+        else:
+            return str(data)
+
+    @staticmethod
+    def deserialize_enum(data, enum_obj):
+        """Deserialize string into enum object.
+
+        If the string is not a valid enum value it will be returned as-is
+        and a warning will be logged.
+
+        :param str data: Response string to be deserialized. If this value is
+         None or invalid it will be returned as-is.
+        :param Enum enum_obj: Enum object to deserialize to.
+        :rtype: Enum
+        """
+        if isinstance(data, enum_obj) or data is None:
+            return data
+        if isinstance(data, Enum):
+            data = data.value
+        if isinstance(data, int):
+            # Workaround. We might consider removing it in the future.
+            try:
+                return list(enum_obj.__members__.values())[data]
+            except IndexError:
+                error = "{!r} is not a valid index for enum {!r}"
+                raise DeserializationError(error.format(data, enum_obj))
+        try:
+            return enum_obj(str(data))
+        except ValueError:
+            for enum_value in enum_obj:
+                if enum_value.value.lower() == str(data).lower():
+                    return enum_value
+            # We don't fail anymore for unknown value, we deserialize as a string
+            _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
+            return Deserializer.deserialize_unicode(data)
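+
+    # Illustrative usage (not part of the vendored file), assuming a
+    # hypothetical ``class Color(Enum): RED = "Red"``:
+    #
+    #     Deserializer.deserialize_enum("red", Color)      # -> Color.RED
+    #     Deserializer.deserialize_enum("magenta", Color)  # -> "magenta" (logged, kept as str)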
+
+    @staticmethod
+    def deserialize_bytearray(attr):
+        """Deserialize string into bytearray.
+
+        :param str attr: response string to be deserialized.
+        :rtype: bytearray
+        :raises: TypeError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return bytearray(b64decode(attr))  # type: ignore
+
+    @staticmethod
+    def deserialize_base64(attr):
+        """Deserialize base64 encoded string into string.
+
+        :param str attr: response string to be deserialized.
+        :rtype: bytearray
+        :raises: TypeError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        padding = "=" * (3 - (len(attr) + 3) % 4)  # type: ignore
+        attr = attr + padding  # type: ignore
+        encoded = attr.replace("-", "+").replace("_", "/")
+        return b64decode(encoded)
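+
+    # Illustrative round-trip (not part of the vendored file):
+    #
+    #     Deserializer.deserialize_base64("aGk")  # -> b"hi" (re-pads, reverses URL-safe chars)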
+
+    @staticmethod
+    def deserialize_decimal(attr):
+        """Deserialize string into Decimal object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: Decimal
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            return decimal.Decimal(str(attr))  # type: ignore
+        except decimal.DecimalException as err:
+            msg = "Invalid decimal {}".format(attr)
+            raise DeserializationError(msg) from err
+
+    @staticmethod
+    def deserialize_long(attr):
+        """Deserialize string into long (Py2) or int (Py3).
+
+        :param str attr: response string to be deserialized.
+        :rtype: long or int
+        :raises: ValueError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return _long_type(attr)  # type: ignore
+
+    @staticmethod
+    def deserialize_duration(attr):
+        """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: TimeDelta
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            duration = isodate.parse_duration(attr)
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize duration object."
+            raise DeserializationError(msg) from err
+        else:
+            return duration
+
+    @staticmethod
+    def deserialize_date(attr):
+        """Deserialize ISO-8601 formatted string into Date object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: Date
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
+        return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+    @staticmethod
+    def deserialize_time(attr):
+        """Deserialize ISO-8601 formatted string into time object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: datetime.time
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        return isodate.parse_time(attr)
+
+    @staticmethod
+    def deserialize_rfc(attr):
+        """Deserialize RFC-1123 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: Datetime
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
+            date_obj = datetime.datetime(
+                *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+            )
+            if not date_obj.tzinfo:
+                date_obj = date_obj.astimezone(tz=TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to rfc datetime object."
+            raise DeserializationError(msg) from err
+        else:
+            return date_obj
+
+    @staticmethod
+    def deserialize_iso(attr):
+        """Deserialize ISO-8601 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: Datetime
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            attr = attr.upper()  # type: ignore
+            match = Deserializer.valid_date.match(attr)
+            if not match:
+                raise ValueError("Invalid datetime string: " + attr)
+
+            check_decimal = attr.split(".")
+            if len(check_decimal) > 1:
+                decimal_str = ""
+                for digit in check_decimal[1]:
+                    if digit.isdigit():
+                        decimal_str += digit
+                    else:
+                        break
+                if len(decimal_str) > 6:
+                    attr = attr.replace(decimal_str, decimal_str[0:6])
+
+            date_obj = isodate.parse_datetime(attr)
+            test_utc = date_obj.utctimetuple()
+            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize datetime object."
+            raise DeserializationError(msg) from err
+        else:
+            return date_obj
+
+    @staticmethod
+    def deserialize_unix(attr):
+        """Serialize Datetime object into IntTime format.
+        This is represented as seconds.
+
+        :param int attr: Object to be serialized.
+        :rtype: Datetime
+        :raises: DeserializationError if format invalid
+        """
+        if isinstance(attr, ET.Element):
+            attr = int(attr.text)  # type: ignore
+        try:
+            attr = int(attr)
+            date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to unix datetime object."
+            raise DeserializationError(msg) from err
+        else:
+            return date_obj
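+
+
+# Overall usage sketch (illustrative, not part of the vendored file): the
+# generated clients wire these classes together roughly as follows, with
+# "MonitoringDataPoint" standing in for any generated model name:
+#
+#     client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
+#     serializer = Serializer(client_models)
+#     deserializer = Deserializer(client_models)
+#     obj = deserializer("MonitoringDataPoint", pipeline_response)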
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_vendor.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_vendor.py
new file mode 100644
index 00000000..3e291d2b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_vendor.py
@@ -0,0 +1,26 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from abc import ABC
+from typing import TYPE_CHECKING
+
+from ._configuration import QuickpulseClientConfiguration
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core import PipelineClient
+
+    from ._serialization import Deserializer, Serializer
+
+
+class QuickpulseClientMixinABC(ABC):
+    """DO NOT use this class. It is for internal typing use only."""
+
+    _client: "PipelineClient"
+    _config: QuickpulseClientConfiguration
+    _serialize: "Serializer"
+    _deserialize: "Deserializer"
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/__init__.py
new file mode 100644
index 00000000..664b539c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/__init__.py
@@ -0,0 +1,23 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._client import QuickpulseClient
+
+try:
+    from ._patch import __all__ as _patch_all
+    from ._patch import *  # pylint: disable=unused-wildcard-import
+except ImportError:
+    _patch_all = []
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+    "QuickpulseClient",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_client.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_client.py
new file mode 100644
index 00000000..2f362639
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_client.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable, TYPE_CHECKING
+
+from azure.core import AsyncPipelineClient
+from azure.core.pipeline import policies
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+
+from .. import models as _models
+from .._serialization import Deserializer, Serializer
+from ._configuration import QuickpulseClientConfiguration
+from ._operations import QuickpulseClientOperationsMixin
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+
+class QuickpulseClient(QuickpulseClientOperationsMixin):  # pylint: disable=client-accepts-api-version-keyword
+    """Quickpulse Client.
+
+    :param credential: Credential needed for the client to connect to Azure. Required.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :keyword api_version: Api Version. Default value is "2024-04-01-preview". Note that overriding
+     this default value may result in unsupported behavior.
+    :paramtype api_version: str
+    """
+
+    def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
+        _endpoint = "{endpoint}"
+        self._config = QuickpulseClientConfiguration(credential=credential, **kwargs)
+        _policies = kwargs.pop("policies", None)
+        if _policies is None:
+            _policies = [
+                policies.RequestIdPolicy(**kwargs),
+                self._config.headers_policy,
+                self._config.user_agent_policy,
+                self._config.proxy_policy,
+                policies.ContentDecodePolicy(**kwargs),
+                self._config.redirect_policy,
+                self._config.retry_policy,
+                self._config.authentication_policy,
+                self._config.custom_hook_policy,
+                self._config.logging_policy,
+                policies.DistributedTracingPolicy(**kwargs),
+                policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+                self._config.http_logging_policy,
+            ]
+        self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs)
+
+        client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+        self._serialize.client_side_validation = False
+
+    def send_request(
+        self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
+    ) -> Awaitable[AsyncHttpResponse]:
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = await client.send_request(request)
+        <AsyncHttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.AsyncHttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, stream=stream, **kwargs)  # type: ignore
+
+    async def close(self) -> None:
+        await self._client.close()
+
+    async def __aenter__(self) -> "QuickpulseClient":
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details: Any) -> None:
+        await self._client.__aexit__(*exc_details)
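+
+
+# Illustrative usage (not part of the generated file), assuming an
+# AsyncTokenCredential implementation such as azure.identity.aio.DefaultAzureCredential:
+#
+#     async with QuickpulseClient(credential=DefaultAzureCredential()) as client:
+#         response = await client.send_request(HttpRequest("GET", "https://www.example.org/"))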
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_configuration.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_configuration.py
new file mode 100644
index 00000000..d6746c09
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_configuration.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.pipeline import policies
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+VERSION = "unknown"
+
+
+class QuickpulseClientConfiguration:  # pylint: disable=too-many-instance-attributes,name-too-long
+    """Configuration for QuickpulseClient.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure. Required.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :keyword api_version: Api Version. Default value is "2024-04-01-preview". Note that overriding
+     this default value may result in unsupported behavior.
+    :paramtype api_version: str
+    """
+
+    def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
+        api_version: str = kwargs.pop("api_version", "2024-04-01-preview")
+
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+
+        self.credential = credential
+        self.api_version = api_version
+        self.credential_scopes = kwargs.pop("credential_scopes", ["https://monitor.azure.com/.default"])
+        kwargs.setdefault("sdk_moniker", "quickpulseclient/{}".format(VERSION))
+        self.polling_interval = kwargs.get("polling_interval", 30)
+        self._configure(**kwargs)
+
+    def _configure(self, **kwargs: Any) -> None:
+        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+        self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
+        self.authentication_policy = kwargs.get("authentication_policy")
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(
+                self.credential, *self.credential_scopes, **kwargs
+            )
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/__init__.py
new file mode 100644
index 00000000..3d1697f9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import QuickpulseClientOperationsMixin
+
+from ._patch import __all__ as _patch_all
+from ._patch import *  # pylint: disable=unused-wildcard-import
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+    "QuickpulseClientOperationsMixin",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_operations.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_operations.py
new file mode 100644
index 00000000..eecf72f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_operations.py
@@ -0,0 +1,464 @@
+# pylint: disable=too-many-lines,too-many-statements
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload
+
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+from ... import models as _models
+from ..._operations._operations import build_quickpulse_is_subscribed_request, build_quickpulse_publish_request
+from .._vendor import QuickpulseClientMixinABC
+
+if sys.version_info >= (3, 9):
+    from collections.abc import MutableMapping
+else:
+    from typing import MutableMapping  # type: ignore  # pylint: disable=ungrouped-imports
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class QuickpulseClientOperationsMixin(QuickpulseClientMixinABC):
+    @overload
+    async def is_subscribed(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_point: Optional[_models.MonitoringDataPoint] = None,
+        *,
+        ikey: str,
+        transmission_time: Optional[int] = None,
+        machine_name: Optional[str] = None,
+        instance_name: Optional[str] = None,
+        stream_id: Optional[str] = None,
+        role_name: Optional[str] = None,
+        invariant_version: Optional[str] = None,
+        configuration_etag: Optional[str] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Determine whether there is any subscription to the metrics and documents.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+         Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+         name and invariant version. Default value is None.
+        :type monitoring_data_point: ~quickpulse_client.models.MonitoringDataPoint
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long integer, in ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+         machine name with instance name as a backup. Default value is None.
+        :paramtype machine_name: str
+        :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+         Metrics uses machine name with instance name as a backup. Default value is None.
+        :paramtype instance_name: str
+        :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+         and documents. Default value is None.
+        :paramtype stream_id: str
+        :keyword role_name: Cloud role name of the service. Default value is None.
+        :paramtype role_name: str
+        :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+         between the client and Live Metrics. Default value is None.
+        :paramtype invariant_version: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    async def is_subscribed(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_point: Optional[IO[bytes]] = None,
+        *,
+        ikey: str,
+        transmission_time: Optional[int] = None,
+        machine_name: Optional[str] = None,
+        instance_name: Optional[str] = None,
+        stream_id: Optional[str] = None,
+        role_name: Optional[str] = None,
+        invariant_version: Optional[str] = None,
+        configuration_etag: Optional[str] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Determine whether there is any subscription to the metrics and documents.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+         Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+         name and invariant version. Default value is None.
+        :type monitoring_data_point: IO[bytes]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long integer, in ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+         machine name with instance name as a backup. Default value is None.
+        :paramtype machine_name: str
+        :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+         Metrics uses machine name with instance name as a backup. Default value is None.
+        :paramtype instance_name: str
+        :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+         and documents. Default value is None.
+        :paramtype stream_id: str
+        :keyword role_name: Cloud role name of the service. Default value is None.
+        :paramtype role_name: str
+        :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+         between the client and Live Metrics. Default value is None.
+        :paramtype invariant_version: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    # @distributed_trace_async
+    async def is_subscribed(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_point: Optional[Union[_models.MonitoringDataPoint, IO[bytes]]] = None,
+        *,
+        ikey: str,
+        transmission_time: Optional[int] = None,
+        machine_name: Optional[str] = None,
+        instance_name: Optional[str] = None,
+        stream_id: Optional[str] = None,
+        role_name: Optional[str] = None,
+        invariant_version: Optional[str] = None,
+        configuration_etag: Optional[str] = None,
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Determine whether there is any subscription to the metrics and documents.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+         Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+         name and invariant version. Is either a MonitoringDataPoint type or an IO[bytes] type. Default
+         value is None.
+        :type monitoring_data_point: ~quickpulse_client.models.MonitoringDataPoint or IO[bytes]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long integer, in ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+         machine name with instance name as a backup. Default value is None.
+        :paramtype machine_name: str
+        :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+         Metrics uses machine name with instance name as a backup. Default value is None.
+        :paramtype instance_name: str
+        :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+         and documents. Default value is None.
+        :paramtype stream_id: str
+        :keyword role_name: Cloud role name of the service. Default value is None.
+        :paramtype role_name: str
+        :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+         between the client and Live Metrics. Default value is None.
+        :paramtype invariant_version: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[_models.CollectionConfigurationInfo] = kwargs.pop("cls", None)
+
+        content_type = content_type or "application/json"
+        _json = None
+        _content = None
+        if isinstance(monitoring_data_point, (IOBase, bytes)):
+            _content = monitoring_data_point
+        else:
+            if monitoring_data_point is not None:
+                _json = self._serialize.body(monitoring_data_point, "MonitoringDataPoint")
+            else:
+                _json = None
+
+        _request = build_quickpulse_is_subscribed_request(
+            ikey=ikey,
+            transmission_time=transmission_time,
+            machine_name=machine_name,
+            instance_name=instance_name,
+            stream_id=stream_id,
+            role_name=role_name,
+            invariant_version=invariant_version,
+            configuration_etag=configuration_etag,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            json=_json,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("endpoint", endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                await response.read()  # Load the body in memory and close the socket
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ServiceError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["x-ms-qps-configuration-etag"] = self._deserialize(
+            "str", response.headers.get("x-ms-qps-configuration-etag")
+        )
+        response_headers["x-ms-qps-service-endpoint-redirect-v2"] = self._deserialize(
+            "str", response.headers.get("x-ms-qps-service-endpoint-redirect-v2")
+        )
+        response_headers["x-ms-qps-service-polling-interval-hint"] = self._deserialize(
+            "str", response.headers.get("x-ms-qps-service-polling-interval-hint")
+        )
+        response_headers["x-ms-qps-subscribed"] = self._deserialize("str", response.headers.get("x-ms-qps-subscribed"))
+
+        deserialized = self._deserialize("CollectionConfigurationInfo", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
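+    # Editor's usage sketch (not generated code): pinging Live Metrics through
+    # this mixin on the generated client. `client` and `data_point` are
+    # hypothetical names; `data_point` would be a _models.MonitoringDataPoint.
+    #
+    #     config_info = await client.is_subscribed(
+    #         monitoring_data_point=data_point,
+    #         ikey="<instrumentation-key>",
+    #     )
+    #     # A changed ETag signals a new collection configuration to apply.
+    #     new_etag = config_info.e_tag
+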
+    @overload
+    async def publish(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_points: Optional[List[_models.MonitoringDataPoint]] = None,
+        *,
+        ikey: str,
+        configuration_etag: Optional[str] = None,
+        transmission_time: Optional[int] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Publish live metrics to the Live Metrics service when there is an active subscription to the
+        metrics.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_points: Data contract between the client and Live Metrics.
+         /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+         invariant version. Default value is None.
+        :type monitoring_data_points: list[~quickpulse_client.models.MonitoringDataPoint]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long integer, in ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    async def publish(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_points: Optional[IO[bytes]] = None,
+        *,
+        ikey: str,
+        configuration_etag: Optional[str] = None,
+        transmission_time: Optional[int] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Publish live metrics to the Live Metrics service when there is an active subscription to the
+        metrics.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_points: Data contract between the client and Live Metrics.
+         /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+         invariant version. Default value is None.
+        :type monitoring_data_points: IO[bytes]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long integer, in ticks. Default value is None.
+        :paramtype transmission_time: int
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    # @distributed_trace_async
+    async def publish(
+        self,
+        endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+        monitoring_data_points: Optional[Union[List[_models.MonitoringDataPoint], IO[bytes]]] = None,
+        *,
+        ikey: str,
+        configuration_etag: Optional[str] = None,
+        transmission_time: Optional[int] = None,
+        **kwargs: Any
+    ) -> _models.CollectionConfigurationInfo:
+        """Publish live metrics to the Live Metrics service when there is an active subscription to the
+        metrics.
+
+        :param endpoint: The endpoint of the Live Metrics service. Default value is
+         "https://global.livediagnostics.monitor.azure.com".
+        :type endpoint: str
+        :param monitoring_data_points: Data contract between the client and Live Metrics.
+         /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+         invariant version. Is either a list of MonitoringDataPoint or an IO[bytes] type. Default value is
+         None.
+        :type monitoring_data_points: list[~quickpulse_client.models.MonitoringDataPoint] or IO[bytes]
+        :keyword ikey: The instrumentation key of the target Application Insights component for which
+         the client checks whether there's any subscription to it. Required.
+        :paramtype ikey: str
+        :keyword configuration_etag: An encoded string that indicates whether the collection
+         configuration is changed. Default value is None.
+        :paramtype configuration_etag: str
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte long integer, in ticks. Default value is None.
+        :paramtype transmission_time: int
+        :return: CollectionConfigurationInfo
+        :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping[int, Type[HttpResponseError]] = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[_models.CollectionConfigurationInfo] = kwargs.pop("cls", None)
+
+        content_type = content_type or "application/json"
+        _json = None
+        _content = None
+        if isinstance(monitoring_data_points, (IOBase, bytes)):
+            _content = monitoring_data_points
+        else:
+            if monitoring_data_points is not None:
+                _json = self._serialize.body(monitoring_data_points, "[MonitoringDataPoint]")
+            else:
+                _json = None
+
+        _request = build_quickpulse_publish_request(
+            ikey=ikey,
+            configuration_etag=configuration_etag,
+            transmission_time=transmission_time,
+            content_type=content_type,
+            api_version=self._config.api_version,
+            json=_json,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("endpoint", endpoint, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                await response.read()  # Load the body in memory and close the socket
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ServiceError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["x-ms-qps-configuration-etag"] = self._deserialize(
+            "str", response.headers.get("x-ms-qps-configuration-etag")
+        )
+        response_headers["x-ms-qps-subscribed"] = self._deserialize("str", response.headers.get("x-ms-qps-subscribed"))
+
+        deserialized = self._deserialize("CollectionConfigurationInfo", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
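+
+    # Editor's usage sketch (not generated code): publishing a batch of data
+    # points once subscribed. `client`, `points`, and `new_etag` are
+    # hypothetical; `points` would be a list of _models.MonitoringDataPoint.
+    #
+    #     config_info = await client.publish(
+    #         monitoring_data_points=points,
+    #         ikey="<instrumentation-key>",
+    #         configuration_etag=new_etag,
+    #     )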
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = []  # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+    """Do not remove from this file.
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = []  # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+    """Do not remove from this file.
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_vendor.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_vendor.py
new file mode 100644
index 00000000..40b0a738
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_vendor.py
@@ -0,0 +1,26 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from abc import ABC
+from typing import TYPE_CHECKING
+
+from ._configuration import QuickpulseClientConfiguration
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core import AsyncPipelineClient
+
+    from .._serialization import Deserializer, Serializer
+
+
+class QuickpulseClientMixinABC(ABC):
+    """DO NOT use this class. It is for internal typing use only."""
+
+    _client: "AsyncPipelineClient"
+    _config: QuickpulseClientConfiguration
+    _serialize: "Serializer"
+    _deserialize: "Deserializer"
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/__init__.py
new file mode 100644
index 00000000..f9d7183a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/__init__.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._models import CollectionConfigurationError
+from ._models import CollectionConfigurationInfo
+from ._models import DerivedMetricInfo
+from ._models import DocumentFilterConjunctionGroupInfo
+from ._models import DocumentIngress
+from ._models import DocumentStreamInfo
+from ._models import Event
+from ._models import Exception
+from ._models import FilterConjunctionGroupInfo
+from ._models import FilterInfo
+from ._models import KeyValuePairString
+from ._models import MetricPoint
+from ._models import MonitoringDataPoint
+from ._models import ProcessCpuData
+from ._models import QuotaConfigurationInfo
+from ._models import RemoteDependency
+from ._models import Request
+from ._models import ServiceError
+from ._models import Trace
+
+from ._enums import AggregationType
+from ._enums import CollectionConfigurationErrorType
+from ._enums import DocumentType
+from ._enums import PredicateType
+from ._enums import TelemetryType
+from ._patch import __all__ as _patch_all
+from ._patch import *  # pylint: disable=unused-wildcard-import
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+    "CollectionConfigurationError",
+    "CollectionConfigurationInfo",
+    "DerivedMetricInfo",
+    "DocumentFilterConjunctionGroupInfo",
+    "DocumentIngress",
+    "DocumentStreamInfo",
+    "Event",
+    "Exception",
+    "FilterConjunctionGroupInfo",
+    "FilterInfo",
+    "KeyValuePairString",
+    "MetricPoint",
+    "MonitoringDataPoint",
+    "ProcessCpuData",
+    "QuotaConfigurationInfo",
+    "RemoteDependency",
+    "Request",
+    "ServiceError",
+    "Trace",
+    "AggregationType",
+    "CollectionConfigurationErrorType",
+    "DocumentType",
+    "PredicateType",
+    "TelemetryType",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_enums.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_enums.py
new file mode 100644
index 00000000..c39af21f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_enums.py
@@ -0,0 +1,111 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class AggregationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Aggregation type."""
+
+    AVG = "Avg"
+    """Average"""
+    SUM = "Sum"
+    """Sum"""
+    MIN = "Min"
+    """Minimum"""
+    MAX = "Max"
+    """Maximum"""
+
+
+class CollectionConfigurationErrorType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Collection configuration error type reported by the client SDK."""
+
+    UNKNOWN = "Unknown"
+    """Unknown error type."""
+    PERFORMANCE_COUNTER_PARSING = "PerformanceCounterParsing"
+    """Performance counter parsing error."""
+    PERFORMANCE_COUNTER_UNEXPECTED = "PerformanceCounterUnexpected"
+    """Performance counter unexpected error."""
+    PERFORMANCE_COUNTER_DUPLICATE_IDS = "PerformanceCounterDuplicateIds"
+    """Performance counter duplicate ids."""
+    DOCUMENT_STREAM_DUPLICATE_IDS = "DocumentStreamDuplicateIds"
+    """Document stream duplication ids."""
+    DOCUMENT_STREAM_FAILURE_TO_CREATE = "DocumentStreamFailureToCreate"
+    """Document stream failed to create."""
+    DOCUMENT_STREAM_FAILURE_TO_CREATE_FILTER_UNEXPECTED = "DocumentStreamFailureToCreateFilterUnexpected"
+    """Document stream failed to create filter unexpectedly."""
+    METRIC_DUPLICATE_IDS = "MetricDuplicateIds"
+    """Metric duplicate ids."""
+    METRIC_TELEMETRY_TYPE_UNSUPPORTED = "MetricTelemetryTypeUnsupported"
+    """Metric telemetry type unsupported."""
+    METRIC_FAILURE_TO_CREATE = "MetricFailureToCreate"
+    """Metric failed to create."""
+    METRIC_FAILURE_TO_CREATE_FILTER_UNEXPECTED = "MetricFailureToCreateFilterUnexpected"
+    """Metric failed to create filter unexpectedly."""
+    FILTER_FAILURE_TO_CREATE_UNEXPECTED = "FilterFailureToCreateUnexpected"
+    """Filter failed to create unexpectedly."""
+    COLLECTION_CONFIGURATION_FAILURE_TO_CREATE_UNEXPECTED = "CollectionConfigurationFailureToCreateUnexpected"
+    """Collection configuration failed to create unexpectedly."""
+
+
+class DocumentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Document type."""
+
+    REQUEST = "Request"
+    """Represents a request telemetry type."""
+    REMOTE_DEPENDENCY = "RemoteDependency"
+    """Represents a remote dependency telemetry type."""
+    EXCEPTION = "Exception"
+    """Represents an exception telemetry type."""
+    EVENT = "Event"
+    """Represents an event telemetry type."""
+    TRACE = "Trace"
+    """Represents a trace telemetry type."""
+    UNKNOWN = "Unknown"
+    """Represents an unknown telemetry type."""
+
+
+class PredicateType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Enum representing the different types of predicates."""
+
+    EQUAL = "Equal"
+    """Represents an equality predicate."""
+    NOT_EQUAL = "NotEqual"
+    """Represents a not-equal predicate."""
+    LESS_THAN = "LessThan"
+    """Represents a less-than predicate."""
+    GREATER_THAN = "GreaterThan"
+    """Represents a greater-than predicate."""
+    LESS_THAN_OR_EQUAL = "LessThanOrEqual"
+    """Represents a less-than-or-equal predicate."""
+    GREATER_THAN_OR_EQUAL = "GreaterThanOrEqual"
+    """Represents a greater-than-or-equal predicate."""
+    CONTAINS = "Contains"
+    """Represents a contains predicate."""
+    DOES_NOT_CONTAIN = "DoesNotContain"
+    """Represents a does-not-contain predicate."""
+
+
+class TelemetryType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Telemetry type."""
+
+    REQUEST = "Request"
+    """Represents a request telemetry type."""
+    DEPENDENCY = "Dependency"
+    """Represents a dependency telemetry type."""
+    EXCEPTION = "Exception"
+    """Represents an exception telemetry type."""
+    EVENT = "Event"
+    """Represents an event telemetry type."""
+    METRIC = "Metric"
+    """Represents a metric telemetry type."""
+    PERFORMANCE_COUNTER = "PerformanceCounter"
+    """Represents a performance counter telemetry type."""
+    TRACE = "Trace"
+    """Represents a trace telemetry type."""
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_models.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_models.py
new file mode 100644
index 00000000..8e22c899
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_models.py
@@ -0,0 +1,1123 @@
+# coding=utf-8
+# pylint: disable=too-many-lines
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Any, List, Optional, TYPE_CHECKING, Union
+
+from .. import _serialization
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from .. import models as _models
+
+
+class CollectionConfigurationError(_serialization.Model):
+    """Represents an error while SDK parses and applies an instance of CollectionConfigurationInfo.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar collection_configuration_error_type: Error type. Required. Known values are: "Unknown",
+     "PerformanceCounterParsing", "PerformanceCounterUnexpected", "PerformanceCounterDuplicateIds",
+     "DocumentStreamDuplicateIds", "DocumentStreamFailureToCreate",
+     "DocumentStreamFailureToCreateFilterUnexpected", "MetricDuplicateIds",
+     "MetricTelemetryTypeUnsupported", "MetricFailureToCreate",
+     "MetricFailureToCreateFilterUnexpected", "FilterFailureToCreateUnexpected", and
+     "CollectionConfigurationFailureToCreateUnexpected".
+    :vartype collection_configuration_error_type: str or
+     ~quickpulse_client.models.CollectionConfigurationErrorType
+    :ivar message: Error message. Required.
+    :vartype message: str
+    :ivar full_exception: Exception that led to the creation of the configuration error. Required.
+    :vartype full_exception: str
+    :ivar data: Custom properties to add more information to the error. Required.
+    :vartype data: list[~quickpulse_client.models.KeyValuePairString]
+    """
+
+    _validation = {
+        "collection_configuration_error_type": {"required": True},
+        "message": {"required": True},
+        "full_exception": {"required": True},
+        "data": {"required": True},
+    }
+
+    _attribute_map = {
+        "collection_configuration_error_type": {"key": "CollectionConfigurationErrorType", "type": "str"},
+        "message": {"key": "Message", "type": "str"},
+        "full_exception": {"key": "FullException", "type": "str"},
+        "data": {"key": "Data", "type": "[KeyValuePairString]"},
+    }
+
+    def __init__(
+        self,
+        *,
+        collection_configuration_error_type: Union[str, "_models.CollectionConfigurationErrorType"],
+        message: str,
+        full_exception: str,
+        data: List["_models.KeyValuePairString"],
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword collection_configuration_error_type: Error type. Required. Known values are:
+         "Unknown", "PerformanceCounterParsing", "PerformanceCounterUnexpected",
+         "PerformanceCounterDuplicateIds", "DocumentStreamDuplicateIds",
+         "DocumentStreamFailureToCreate", "DocumentStreamFailureToCreateFilterUnexpected",
+         "MetricDuplicateIds", "MetricTelemetryTypeUnsupported", "MetricFailureToCreate",
+         "MetricFailureToCreateFilterUnexpected", "FilterFailureToCreateUnexpected", and
+         "CollectionConfigurationFailureToCreateUnexpected".
+        :paramtype collection_configuration_error_type: str or
+         ~quickpulse_client.models.CollectionConfigurationErrorType
+        :keyword message: Error message. Required.
+        :paramtype message: str
+        :keyword full_exception: Exception that led to the creation of the configuration error.
+         Required.
+        :paramtype full_exception: str
+        :keyword data: Custom properties to add more information to the error. Required.
+        :paramtype data: list[~quickpulse_client.models.KeyValuePairString]
+        """
+        super().__init__(**kwargs)
+        self.collection_configuration_error_type = collection_configuration_error_type
+        self.message = message
+        self.full_exception = full_exception
+        self.data = data
+
+
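+# Editor's note (not generated code): a minimal construction sketch; the enum
+# member and an empty `data` list keep it self-contained. The error text is
+# illustrative only, and CollectionConfigurationErrorType comes from ._enums.
+#
+#     err = CollectionConfigurationError(
+#         collection_configuration_error_type=CollectionConfigurationErrorType.UNKNOWN,
+#         message="failed to apply configuration",
+#         full_exception="",
+#         data=[],
+#     )
+
+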
+class CollectionConfigurationInfo(_serialization.Model):
+    """Represents the collection configuration - a customizable description of performance counters,
+    metrics, and full telemetry documents to be collected by the client SDK.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar e_tag: An encoded string that indicates whether the collection configuration is changed.
+     Required.
+    :vartype e_tag: str
+    :ivar metrics: An array of metric configuration info. Required.
+    :vartype metrics: list[~quickpulse_client.models.DerivedMetricInfo]
+    :ivar document_streams: An array of document stream configuration info. Required.
+    :vartype document_streams: list[~quickpulse_client.models.DocumentStreamInfo]
+    :ivar quota_info: Controls document quotas to be sent to Live Metrics.
+    :vartype quota_info: ~quickpulse_client.models.QuotaConfigurationInfo
+    """
+
+    _validation = {
+        "e_tag": {"required": True},
+        "metrics": {"required": True},
+        "document_streams": {"required": True},
+    }
+
+    _attribute_map = {
+        "e_tag": {"key": "ETag", "type": "str"},
+        "metrics": {"key": "Metrics", "type": "[DerivedMetricInfo]"},
+        "document_streams": {"key": "DocumentStreams", "type": "[DocumentStreamInfo]"},
+        "quota_info": {"key": "QuotaInfo", "type": "QuotaConfigurationInfo"},
+    }
+
+    def __init__(
+        self,
+        *,
+        e_tag: str,
+        metrics: List["_models.DerivedMetricInfo"],
+        document_streams: List["_models.DocumentStreamInfo"],
+        quota_info: Optional["_models.QuotaConfigurationInfo"] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword e_tag: An encoded string that indicates whether the collection configuration is
+         changed. Required.
+        :paramtype e_tag: str
+        :keyword metrics: An array of metric configuration info. Required.
+        :paramtype metrics: list[~quickpulse_client.models.DerivedMetricInfo]
+        :keyword document_streams: An array of document stream configuration info. Required.
+        :paramtype document_streams: list[~quickpulse_client.models.DocumentStreamInfo]
+        :keyword quota_info: Controls document quotas to be sent to Live Metrics.
+        :paramtype quota_info: ~quickpulse_client.models.QuotaConfigurationInfo
+        """
+        super().__init__(**kwargs)
+        self.e_tag = e_tag
+        self.metrics = metrics
+        self.document_streams = document_streams
+        self.quota_info = quota_info
+
+
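+# Editor's note (not generated code): a sketch of how `e_tag` round-trips --
+# the value received on this model is echoed back on the next ping via the
+# `configuration_etag` keyword so the service can detect a stale configuration.
+# `client` and `config_info` are hypothetical:
+#
+#     refreshed = await client.is_subscribed(
+#         ikey="<instrumentation-key>",
+#         configuration_etag=config_info.e_tag,
+#     )
+
+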
+class DerivedMetricInfo(_serialization.Model):
+    """A metric configuration set by UX to scope the metrics it's interested in.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar id: Metric configuration identifier. Required.
+    :vartype id: str
+    :ivar telemetry_type: Telemetry type. Required.
+    :vartype telemetry_type: str
+    :ivar filter_groups: A collection of filters to scope metrics that UX needs. Required.
+    :vartype filter_groups: list[~quickpulse_client.models.FilterConjunctionGroupInfo]
+    :ivar projection: Telemetry's metric dimension whose value is to be aggregated. Example values:
+     Duration, Count(),... Required.
+    :vartype projection: str
+    :ivar aggregation: Aggregation type. This is the aggregation done over everything within a
+     single server. Required. Known values are: "Avg", "Sum", "Min", and "Max".
+    :vartype aggregation: str or ~quickpulse_client.models.AggregationType
+    :ivar back_end_aggregation: Aggregation type. This aggregation is done across the values for
+     all the servers taken together. Required. Known values are: "Avg", "Sum", "Min", and "Max".
+    :vartype back_end_aggregation: str or ~quickpulse_client.models.AggregationType
+    """
+
+    _validation = {
+        "id": {"required": True},
+        "telemetry_type": {"required": True},
+        "filter_groups": {"required": True},
+        "projection": {"required": True},
+        "aggregation": {"required": True},
+        "back_end_aggregation": {"required": True},
+    }
+
+    _attribute_map = {
+        "id": {"key": "Id", "type": "str"},
+        "telemetry_type": {"key": "TelemetryType", "type": "str"},
+        "filter_groups": {"key": "FilterGroups", "type": "[FilterConjunctionGroupInfo]"},
+        "projection": {"key": "Projection", "type": "str"},
+        "aggregation": {"key": "Aggregation", "type": "str"},
+        "back_end_aggregation": {"key": "BackEndAggregation", "type": "str"},
+    }
+
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        telemetry_type: str,
+        filter_groups: List["_models.FilterConjunctionGroupInfo"],
+        projection: str,
+        aggregation: Union[str, "_models.AggregationType"],
+        back_end_aggregation: Union[str, "_models.AggregationType"],
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword id: Metric configuration identifier. Required.
+        :paramtype id: str
+        :keyword telemetry_type: Telemetry type. Required.
+        :paramtype telemetry_type: str
+        :keyword filter_groups: A collection of filters to scope metrics that UX needs. Required.
+        :paramtype filter_groups: list[~quickpulse_client.models.FilterConjunctionGroupInfo]
+        :keyword projection: Telemetry's metric dimension whose value is to be aggregated. Example
+         values: Duration, Count(),... Required.
+        :paramtype projection: str
+        :keyword aggregation: Aggregation type. This is the aggregation done over everything within a
+         single server. Required. Known values are: "Avg", "Sum", "Min", and "Max".
+        :paramtype aggregation: str or ~quickpulse_client.models.AggregationType
+        :keyword back_end_aggregation: Aggregation type. This aggregation is done across the values for
+         all the servers taken together. Required. Known values are: "Avg", "Sum", "Min", and "Max".
+        :paramtype back_end_aggregation: str or ~quickpulse_client.models.AggregationType
+        """
+        super().__init__(**kwargs)
+        self.id = id
+        self.telemetry_type = telemetry_type
+        self.filter_groups = filter_groups
+        self.projection = projection
+        self.aggregation = aggregation
+        self.back_end_aggregation = back_end_aggregation
+
+
+class DocumentFilterConjunctionGroupInfo(_serialization.Model):
+    """A collection of filters for a specific telemetry type.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar telemetry_type: Telemetry type. Required. Known values are: "Request", "Dependency",
+     "Exception", "Event", "Metric", "PerformanceCounter", and "Trace".
+    :vartype telemetry_type: str or ~quickpulse_client.models.TelemetryType
+    :ivar filters: An AND-connected group of filters. Required.
+    :vartype filters: ~quickpulse_client.models.FilterConjunctionGroupInfo
+    """
+
+    _validation = {
+        "telemetry_type": {"required": True},
+        "filters": {"required": True},
+    }
+
+    _attribute_map = {
+        "telemetry_type": {"key": "TelemetryType", "type": "str"},
+        "filters": {"key": "Filters", "type": "FilterConjunctionGroupInfo"},
+    }
+
+    def __init__(
+        self,
+        *,
+        telemetry_type: Union[str, "_models.TelemetryType"],
+        filters: "_models.FilterConjunctionGroupInfo",
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword telemetry_type: Telemetry type. Required. Known values are: "Request", "Dependency",
+         "Exception", "Event", "Metric", "PerformanceCounter", and "Trace".
+        :paramtype telemetry_type: str or ~quickpulse_client.models.TelemetryType
+        :keyword filters: An AND-connected group of filters. Required.
+        :paramtype filters: ~quickpulse_client.models.FilterConjunctionGroupInfo
+        """
+        super().__init__(**kwargs)
+        self.telemetry_type = telemetry_type
+        self.filters = filters
+
+
+class DocumentIngress(_serialization.Model):
+    """Base class of the specific document types.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    Event, Exception, RemoteDependency, Request, Trace
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar document_type: Telemetry type. Types not defined in the enum will get replaced with an
+     'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+     "Event", "Trace", and "Unknown".
+    :vartype document_type: str or ~quickpulse_client.models.DocumentType
+    :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+     documents customized by UX customers.
+    :vartype document_stream_ids: list[str]
+    :ivar properties: Collection of custom properties.
+    :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+    """
+
+    _validation = {
+        "document_type": {"required": True},
+    }
+
+    _attribute_map = {
+        "document_type": {"key": "DocumentType", "type": "str"},
+        "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+        "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+    }
+
+    _subtype_map = {
+        "document_type": {
+            "Event": "Event",
+            "Exception": "Exception",
+            "RemoteDependency": "RemoteDependency",
+            "Request": "Request",
+            "Trace": "Trace",
+        }
+    }
+
+    def __init__(
+        self,
+        *,
+        document_stream_ids: Optional[List[str]] = None,
+        properties: Optional[List["_models.KeyValuePairString"]] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+         documents customized by UX customers.
+        :paramtype document_stream_ids: list[str]
+        :keyword properties: Collection of custom properties.
+        :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+        """
+        super().__init__(**kwargs)
+        self.document_type: Optional[str] = None
+        self.document_stream_ids = document_stream_ids
+        self.properties = properties
+
+
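+# Editor's note (not generated code): the `_subtype_map` above makes this model
+# polymorphic -- deserialization dispatches on the DocumentType value. A
+# minimal sketch, assuming the vendored _serialization.Model helpers:
+#
+#     doc = DocumentIngress.deserialize({"DocumentType": "Trace"})
+#     assert isinstance(doc, Trace)
+
+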
+class DocumentStreamInfo(_serialization.Model):
+    """Configurations/filters set by UX to scope the document/telemetry it's interested in.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar id: Identifier of the document stream initiated by the UX. Required.
+    :vartype id: str
+    :ivar document_filter_groups: An OR-connected collection of filter groups. Required.
+    :vartype document_filter_groups:
+     list[~quickpulse_client.models.DocumentFilterConjunctionGroupInfo]
+    """
+
+    _validation = {
+        "id": {"required": True},
+        "document_filter_groups": {"required": True},
+    }
+
+    _attribute_map = {
+        "id": {"key": "Id", "type": "str"},
+        "document_filter_groups": {"key": "DocumentFilterGroups", "type": "[DocumentFilterConjunctionGroupInfo]"},
+    }
+
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        document_filter_groups: List["_models.DocumentFilterConjunctionGroupInfo"],
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword id: Identifier of the document stream initiated by the UX. Required.
+        :paramtype id: str
+        :keyword document_filter_groups: An OR-connected collection of filter groups. Required.
+        :paramtype document_filter_groups:
+         list[~quickpulse_client.models.DocumentFilterConjunctionGroupInfo]
+        """
+        super().__init__(**kwargs)
+        self.id = id
+        self.document_filter_groups = document_filter_groups
+
+
+class Event(DocumentIngress):
+    """Event document type.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar document_type: Telemetry type. Types not defined in the enum will get replaced with an
+     'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+     "Event", "Trace", and "Unknown".
+    :vartype document_type: str or ~quickpulse_client.models.DocumentType
+    :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+     documents customized by UX customers.
+    :vartype document_stream_ids: list[str]
+    :ivar properties: Collection of custom properties.
+    :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+    :ivar name: Event name.
+    :vartype name: str
+    """
+
+    _validation = {
+        "document_type": {"required": True},
+        "name": {"max_length": 512},
+    }
+
+    _attribute_map = {
+        "document_type": {"key": "DocumentType", "type": "str"},
+        "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+        "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+        "name": {"key": "Name", "type": "str"},
+    }
+
+    def __init__(
+        self,
+        *,
+        document_stream_ids: Optional[List[str]] = None,
+        properties: Optional[List["_models.KeyValuePairString"]] = None,
+        name: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+         documents customized by UX customers.
+        :paramtype document_stream_ids: list[str]
+        :keyword properties: Collection of custom properties.
+        :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+        :keyword name: Event name.
+        :paramtype name: str
+        """
+        super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+        self.document_type: str = "Event"
+        self.name = name
+
+
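+# Editor's note (not generated code): a minimal construction sketch. The
+# discriminator is fixed by the subclass, so callers never pass document_type;
+# the 512-char max_length on `name` is declared for client-side validation.
+#
+#     ev = Event(name="checkout-completed", document_stream_ids=["stream-1"])
+#     assert ev.document_type == "Event"
+
+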
+class Exception(DocumentIngress):
+    """Exception document type.
+
+    All required parameters must be populated in order to send to the server.
+
+    :ivar document_type: Telemetry type. Types not defined in the enum will get replaced with an
+     'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+     "Event", "Trace", and "Unknown".
+    :vartype document_type: str or ~quickpulse_client.models.DocumentType
+    :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+     documents customized by UX customers.
+    :vartype document_stream_ids: list[str]
+    :ivar properties: Collection of custom properties.
+    :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+    :ivar exception_type: Exception type name.
+    :vartype exception_type: str
+    :ivar exception_message: Exception message.
+    :vartype exception_message: str
+    """
+
+    _validation = {
+        "document_type": {"required": True},
+        "exception_type": {"max_length": 1024},
+        "exception_message": {"max_length": 32768},
+    }
+
+    _attribute_map = {
+        "document_type": {"key": "DocumentType", "type": "str"},
+        "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+        "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+        "exception_type": {"key": "ExceptionType", "type": "str"},
+        "exception_message": {"key": "ExceptionMessage", "type": "str"},
+    }
+
+    def __init__(
+        self,
+        *,
+        document_stream_ids: Optional[List[str]] = None,
+        properties: Optional[List["_models.KeyValuePairString"]] = None,
+        exception_type: Optional[str] = None,
+        exception_message: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+         documents customized by UX customers.
+        :paramtype document_stream_ids: list[str]
+        :keyword properties: Collection of custom properties.
+        :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+        :keyword exception_type: Exception type name.
+        :paramtype exception_type: str
+        :keyword exception_message: Exception message.
+        :paramtype exception_message: str
+        """
+        super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+        self.document_type: str = "Exception"
+        self.exception_type = exception_type
+        self.exception_message = exception_message
+
+
+class FilterConjunctionGroupInfo(_serialization.Model):
+    """An AND-connected group of FilterInfo objects.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar filters: An array of filters. Required.
+    :vartype filters: list[~quickpulse_client.models.FilterInfo]
+    """
+
+    _validation = {
+        "filters": {"required": True},
+    }
+
+    _attribute_map = {
+        "filters": {"key": "Filters", "type": "[FilterInfo]"},
+    }
+
+    def __init__(self, *, filters: List["_models.FilterInfo"], **kwargs: Any) -> None:
+        """
+        :keyword filters: An array of filters. Required.
+        :paramtype filters: list[~quickpulse_client.models.FilterInfo]
+        """
+        super().__init__(**kwargs)
+        self.filters = filters
+
+
+class FilterInfo(_serialization.Model):
+    """A filter set on UX.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar field_name: Dimension name of the filter. Required.
+    :vartype field_name: str
+    :ivar predicate: Operator of the filter. Required. Known values are: "Equal", "NotEqual",
+     "LessThan", "GreaterThan", "LessThanOrEqual", "GreaterThanOrEqual", "Contains", and
+     "DoesNotContain".
+    :vartype predicate: str or ~quickpulse_client.models.PredicateType
+    :ivar comparand: Comparand of the filter. Required.
+    :vartype comparand: str
+    """
+
+    _validation = {
+        "field_name": {"required": True},
+        "predicate": {"required": True},
+        "comparand": {"required": True},
+    }
+
+    _attribute_map = {
+        "field_name": {"key": "FieldName", "type": "str"},
+        "predicate": {"key": "Predicate", "type": "str"},
+        "comparand": {"key": "Comparand", "type": "str"},
+    }
+
+    def __init__(
+        self, *, field_name: str, predicate: Union[str, "_models.PredicateType"], comparand: str, **kwargs: Any
+    ) -> None:
+        """
+        :keyword field_name: Dimension name of the filter. Required.
+        :paramtype field_name: str
+        :keyword predicate: Operator of the filter. Required. Known values are: "Equal", "NotEqual",
+         "LessThan", "GreaterThan", "LessThanOrEqual", "GreaterThanOrEqual", "Contains", and
+         "DoesNotContain".
+        :paramtype predicate: str or ~quickpulse_client.models.PredicateType
+        :keyword comparand: Comparand of the filter. Required.
+        :paramtype comparand: str
+        """
+        super().__init__(**kwargs)
+        self.field_name = field_name
+        self.predicate = predicate
+        self.comparand = comparand
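+
+
+# Illustrative sketch (the dimension name and values below are hypothetical,
+# not taken from the service contract): a single AND-connected group that
+# matches telemetry whose "Url" dimension contains "/api/".
+#
+#     group = FilterConjunctionGroupInfo(
+#         filters=[FilterInfo(field_name="Url", predicate="Contains", comparand="/api/")]
+#     )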
+
+
+class KeyValuePairString(_serialization.Model):
+    """Key-value pair of string and string.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar key: Key of the key-value pair. Required.
+    :vartype key: str
+    :ivar value: Value of the key-value pair. Required.
+    :vartype value: str
+    """
+
+    _validation = {
+        "key": {"required": True},
+        "value": {"required": True},
+    }
+
+    _attribute_map = {
+        "key": {"key": "key", "type": "str"},
+        "value": {"key": "value", "type": "str"},
+    }
+
+    def __init__(self, *, key: str, value: str, **kwargs: Any) -> None:
+        """
+        :keyword key: Key of the key-value pair. Required.
+        :paramtype key: str
+        :keyword value: Value of the key-value pair. Required.
+        :paramtype value: str
+        """
+        super().__init__(**kwargs)
+        self.key = key
+        self.value = value
+
+
+class MetricPoint(_serialization.Model):
+    """Metric data point.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar name: Metric name. Required.
+    :vartype name: str
+    :ivar value: Metric value. Required.
+    :vartype value: float
+    :ivar weight: Metric weight. Required.
+    :vartype weight: int
+    """
+
+    _validation = {
+        "name": {"required": True},
+        "value": {"required": True},
+        "weight": {"required": True},
+    }
+
+    _attribute_map = {
+        "name": {"key": "Name", "type": "str"},
+        "value": {"key": "Value", "type": "float"},
+        "weight": {"key": "Weight", "type": "int"},
+    }
+
+    def __init__(self, *, name: str, value: float, weight: int, **kwargs: Any) -> None:
+        """
+        :keyword name: Metric name. Required.
+        :paramtype name: str
+        :keyword value: Metric value. Required.
+        :paramtype value: float
+        :keyword weight: Metric weight. Required.
+        :paramtype weight: int
+        """
+        super().__init__(**kwargs)
+        self.name = name
+        self.value = value
+        self.weight = weight
+
+
+class MonitoringDataPoint(_serialization.Model):  # pylint: disable=too-many-instance-attributes
+    """Monitoring data point coming from the client, which includes metrics, documents and other
+    metadata info.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar version: Application Insights SDK version. Required.
+    :vartype version: str
+    :ivar invariant_version: Version/generation of the data contract (MonitoringDataPoint) between
+     SDK and Live Metrics. Required.
+    :vartype invariant_version: int
+    :ivar instance: Service instance name where Application Insights SDK lives. Required.
+    :vartype instance: str
+    :ivar role_name: Service role name. Required.
+    :vartype role_name: str
+    :ivar machine_name: Computer name where Application Insights SDK lives. Required.
+    :vartype machine_name: str
+    :ivar stream_id: Identifies an Application Insights SDK as a trusted agent to report metrics
+     and documents. Required.
+    :vartype stream_id: str
+    :ivar timestamp: Data point generation timestamp.
+    :vartype timestamp: ~datetime.datetime
+    :ivar transmission_time: Timestamp when the client transmits the metrics and documents to Live
+     Metrics.
+    :vartype transmission_time: ~datetime.datetime
+    :ivar is_web_app: True if the current application is an Azure Web App. Required.
+    :vartype is_web_app: bool
+    :ivar performance_collection_supported: True if performance counters collection is supported.
+     Required.
+    :vartype performance_collection_supported: bool
+    :ivar metrics: An array of metric data points.
+    :vartype metrics: list[~quickpulse_client.models.MetricPoint]
+    :ivar documents: An array of documents of a specific type {Request}, {RemoteDependency},
+     {Exception}, {Event}, or {Trace}.
+    :vartype documents: list[~quickpulse_client.models.DocumentIngress]
+    :ivar top_cpu_processes: An array of top CPU consumption data points.
+    :vartype top_cpu_processes: list[~quickpulse_client.models.ProcessCpuData]
+    :ivar collection_configuration_errors: An array of errors encountered while the SDK parses
+     and applies the {CollectionConfigurationInfo} provided by Live Metrics.
+    :vartype collection_configuration_errors:
+     list[~quickpulse_client.models.CollectionConfigurationError]
+    """
+
+    _validation = {
+        "version": {"required": True},
+        "invariant_version": {"required": True},
+        "instance": {"required": True},
+        "role_name": {"required": True},
+        "machine_name": {"required": True},
+        "stream_id": {"required": True},
+        "is_web_app": {"required": True},
+        "performance_collection_supported": {"required": True},
+    }
+
+    _attribute_map = {
+        "version": {"key": "Version", "type": "str"},
+        "invariant_version": {"key": "InvariantVersion", "type": "int"},
+        "instance": {"key": "Instance", "type": "str"},
+        "role_name": {"key": "RoleName", "type": "str"},
+        "machine_name": {"key": "MachineName", "type": "str"},
+        "stream_id": {"key": "StreamId", "type": "str"},
+        "timestamp": {"key": "Timestamp", "type": "iso-8601"},
+        "transmission_time": {"key": "TransmissionTime", "type": "iso-8601"},
+        "is_web_app": {"key": "IsWebApp", "type": "bool"},
+        "performance_collection_supported": {"key": "PerformanceCollectionSupported", "type": "bool"},
+        "metrics": {"key": "Metrics", "type": "[MetricPoint]"},
+        "documents": {"key": "Documents", "type": "[DocumentIngress]"},
+        "top_cpu_processes": {"key": "TopCpuProcesses", "type": "[ProcessCpuData]"},
+        "collection_configuration_errors": {
+            "key": "CollectionConfigurationErrors",
+            "type": "[CollectionConfigurationError]",
+        },
+    }
+
+    def __init__(
+        self,
+        *,
+        version: str,
+        invariant_version: int,
+        instance: str,
+        role_name: str,
+        machine_name: str,
+        stream_id: str,
+        is_web_app: bool,
+        performance_collection_supported: bool,
+        timestamp: Optional[datetime.datetime] = None,
+        transmission_time: Optional[datetime.datetime] = None,
+        metrics: Optional[List["_models.MetricPoint"]] = None,
+        documents: Optional[List["_models.DocumentIngress"]] = None,
+        top_cpu_processes: Optional[List["_models.ProcessCpuData"]] = None,
+        collection_configuration_errors: Optional[List["_models.CollectionConfigurationError"]] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword version: Application Insights SDK version. Required.
+        :paramtype version: str
+        :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+         between SDK and Live Metrics. Required.
+        :paramtype invariant_version: int
+        :keyword instance: Service instance name where Application Insights SDK lives. Required.
+        :paramtype instance: str
+        :keyword role_name: Service role name. Required.
+        :paramtype role_name: str
+        :keyword machine_name: Computer name where Application Insights SDK lives. Required.
+        :paramtype machine_name: str
+        :keyword stream_id: Identifies an Application Insights SDK as a trusted agent to report metrics
+         and documents. Required.
+        :paramtype stream_id: str
+        :keyword timestamp: Data point generation timestamp.
+        :paramtype timestamp: ~datetime.datetime
+        :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics.
+        :paramtype transmission_time: ~datetime.datetime
+        :keyword is_web_app: True if the current application is an Azure Web App. Required.
+        :paramtype is_web_app: bool
+        :keyword performance_collection_supported: True if performance counters collection is
+         supported. Required.
+        :paramtype performance_collection_supported: bool
+        :keyword metrics: An array of metric data points.
+        :paramtype metrics: list[~quickpulse_client.models.MetricPoint]
+        :keyword documents: An array of documents of a specific type {Request}, {RemoteDependency},
+         {Exception}, {Event}, or {Trace}.
+        :paramtype documents: list[~quickpulse_client.models.DocumentIngress]
+        :keyword top_cpu_processes: An array of top CPU consumption data points.
+        :paramtype top_cpu_processes: list[~quickpulse_client.models.ProcessCpuData]
+        :keyword collection_configuration_errors: An array of errors encountered while the SDK
+         parses and applies the {CollectionConfigurationInfo} provided by Live Metrics.
+        :paramtype collection_configuration_errors:
+         list[~quickpulse_client.models.CollectionConfigurationError]
+        """
+        super().__init__(**kwargs)
+        self.version = version
+        self.invariant_version = invariant_version
+        self.instance = instance
+        self.role_name = role_name
+        self.machine_name = machine_name
+        self.stream_id = stream_id
+        self.timestamp = timestamp
+        self.transmission_time = transmission_time
+        self.is_web_app = is_web_app
+        self.performance_collection_supported = performance_collection_supported
+        self.metrics = metrics
+        self.documents = documents
+        self.top_cpu_processes = top_cpu_processes
+        self.collection_configuration_errors = collection_configuration_errors
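+
+
+# Illustrative sketch (all values are placeholders): constructing a minimal
+# MonitoringDataPoint envelope with one metric point.
+#
+#     point = MonitoringDataPoint(
+#         version="1.0.0",
+#         invariant_version=5,
+#         instance="instance-0",
+#         role_name="my-service",
+#         machine_name="host-1",
+#         stream_id="0" * 32,
+#         is_web_app=False,
+#         performance_collection_supported=True,
+#         metrics=[MetricPoint(name="requests/sec", value=3.0, weight=1)],
+#     )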
+
+
+class ProcessCpuData(_serialization.Model):
+    """CPU consumption datapoint.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar process_name: Process name. Required.
+    :vartype process_name: str
+    :ivar cpu_percentage: CPU consumption percentage. Required.
+    :vartype cpu_percentage: int
+    """
+
+    _validation = {
+        "process_name": {"required": True},
+        "cpu_percentage": {"required": True},
+    }
+
+    _attribute_map = {
+        "process_name": {"key": "ProcessName", "type": "str"},
+        "cpu_percentage": {"key": "CpuPercentage", "type": "int"},
+    }
+
+    def __init__(self, *, process_name: str, cpu_percentage: int, **kwargs: Any) -> None:
+        """
+        :keyword process_name: Process name. Required.
+        :paramtype process_name: str
+        :keyword cpu_percentage: CPU consumption percentage. Required.
+        :paramtype cpu_percentage: int
+        """
+        super().__init__(**kwargs)
+        self.process_name = process_name
+        self.cpu_percentage = cpu_percentage
+
+
+class QuotaConfigurationInfo(_serialization.Model):
+    """Controls document quotas to be sent to Live Metrics.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar initial_quota: Initial quota.
+    :vartype initial_quota: float
+    :ivar max_quota: Max quota. Required.
+    :vartype max_quota: float
+    :ivar quota_accrual_rate_per_sec: Quota accrual rate per second. Required.
+    :vartype quota_accrual_rate_per_sec: float
+    """
+
+    _validation = {
+        "max_quota": {"required": True},
+        "quota_accrual_rate_per_sec": {"required": True},
+    }
+
+    _attribute_map = {
+        "initial_quota": {"key": "InitialQuota", "type": "float"},
+        "max_quota": {"key": "MaxQuota", "type": "float"},
+        "quota_accrual_rate_per_sec": {"key": "QuotaAccrualRatePerSec", "type": "float"},
+    }
+
+    def __init__(
+        self,
+        *,
+        max_quota: float,
+        quota_accrual_rate_per_sec: float,
+        initial_quota: Optional[float] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword initial_quota: Initial quota.
+        :paramtype initial_quota: float
+        :keyword max_quota: Max quota. Required.
+        :paramtype max_quota: float
+        :keyword quota_accrual_rate_per_sec: Quota accrual rate per second. Required.
+        :paramtype quota_accrual_rate_per_sec: float
+        """
+        super().__init__(**kwargs)
+        self.initial_quota = initial_quota
+        self.max_quota = max_quota
+        self.quota_accrual_rate_per_sec = quota_accrual_rate_per_sec
+
+
+class RemoteDependency(DocumentIngress):
+    """RemoteDependency document type.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+     'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+     "Event", "Trace", and "Unknown".
+    :vartype document_type: str or ~quickpulse_client.models.DocumentType
+    :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+     documents customized by UX customers.
+    :vartype document_stream_ids: list[str]
+    :ivar properties: Collection of custom properties.
+    :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+    :ivar name: Name of the command initiated with this dependency call, e.g., GET /username.
+    :vartype name: str
+    :ivar command_name: URL of the dependency call to the target, with all query string parameters.
+    :vartype command_name: str
+    :ivar result_code: Result code of a dependency call. Examples are SQL error code and HTTP
+     status code.
+    :vartype result_code: str
+    :ivar duration: Dependency call duration in ISO 8601 duration format, i.e.,
+     P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W.
+    :vartype duration: str
+    """
+
+    _validation = {
+        "document_type": {"required": True},
+        "name": {"max_length": 1024},
+        "command_name": {"max_length": 2048},
+        "result_code": {"max_length": 1024},
+    }
+
+    _attribute_map = {
+        "document_type": {"key": "DocumentType", "type": "str"},
+        "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+        "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+        "name": {"key": "Name", "type": "str"},
+        "command_name": {"key": "CommandName", "type": "str"},
+        "result_code": {"key": "ResultCode", "type": "str"},
+        "duration": {"key": "Duration", "type": "str"},
+    }
+
+    def __init__(
+        self,
+        *,
+        document_stream_ids: Optional[List[str]] = None,
+        properties: Optional[List["_models.KeyValuePairString"]] = None,
+        name: Optional[str] = None,
+        command_name: Optional[str] = None,
+        result_code: Optional[str] = None,
+        duration: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+         documents customized by UX customers.
+        :paramtype document_stream_ids: list[str]
+        :keyword properties: Collection of custom properties.
+        :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+        :keyword name: Name of the command initiated with this dependency call, e.g., GET /username.
+        :paramtype name: str
+        :keyword command_name: URL of the dependency call to the target, with all query string
+         parameters.
+        :paramtype command_name: str
+        :keyword result_code: Result code of a dependency call. Examples are SQL error code and HTTP
+         status code.
+        :paramtype result_code: str
+        :keyword duration: Dependency call duration in ISO 8601 duration format, i.e.,
+         P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W.
+        :paramtype duration: str
+        """
+        super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+        self.document_type: str = "RemoteDependency"
+        self.name = name
+        self.command_name = command_name
+        self.result_code = result_code
+        self.duration = duration
+
+
+class Request(DocumentIngress):
+    """Request document type.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+     'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+     "Event", "Trace", and "Unknown".
+    :vartype document_type: str or ~quickpulse_client.models.DocumentType
+    :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+     documents customized by UX customers.
+    :vartype document_stream_ids: list[str]
+    :ivar properties: Collection of custom properties.
+    :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+    :ivar name: Name of the request, e.g., 'GET /values/{id}'.
+    :vartype name: str
+    :ivar url: Request URL with all query string parameters.
+    :vartype url: str
+    :ivar response_code: Result of a request execution. For HTTP requests, it could be some HTTP
+     status code.
+    :vartype response_code: str
+    :ivar duration: Request duration in ISO 8601 duration format, i.e., P[n]Y[n]M[n]DT[n]H[n]M[n]S
+     or P[n]W.
+    :vartype duration: str
+    """
+
+    _validation = {
+        "document_type": {"required": True},
+        "name": {"max_length": 1024},
+        "url": {"max_length": 2048},
+        "response_code": {"max_length": 1024},
+    }
+
+    _attribute_map = {
+        "document_type": {"key": "DocumentType", "type": "str"},
+        "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+        "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+        "name": {"key": "Name", "type": "str"},
+        "url": {"key": "Url", "type": "str"},
+        "response_code": {"key": "ResponseCode", "type": "str"},
+        "duration": {"key": "Duration", "type": "str"},
+    }
+
+    def __init__(
+        self,
+        *,
+        document_stream_ids: Optional[List[str]] = None,
+        properties: Optional[List["_models.KeyValuePairString"]] = None,
+        name: Optional[str] = None,
+        url: Optional[str] = None,
+        response_code: Optional[str] = None,
+        duration: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+         documents customized by UX customers.
+        :paramtype document_stream_ids: list[str]
+        :keyword properties: Collection of custom properties.
+        :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+        :keyword name: Name of the request, e.g., 'GET /values/{id}'.
+        :paramtype name: str
+        :keyword url: Request URL with all query string parameters.
+        :paramtype url: str
+        :keyword response_code: Result of a request execution. For HTTP requests, it could be some
+         HTTP status code.
+        :paramtype response_code: str
+        :keyword duration: Request duration in ISO 8601 duration format, i.e.,
+         P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W.
+        :paramtype duration: str
+        """
+        super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+        self.document_type: str = "Request"
+        self.name = name
+        self.url = url
+        self.response_code = response_code
+        self.duration = duration
+
+
+class ServiceError(_serialization.Model):
+    """Optional http response body, whose existence carries additional error descriptions.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar request_id: A globally unique identifier to identify the diagnostic context. It defaults
+     to the empty GUID.
+    :vartype request_id: str
+    :ivar response_date_time: Service error response date time. Required.
+    :vartype response_date_time: str
+    :ivar code: Error code. Required.
+    :vartype code: str
+    :ivar message: Error message. Required.
+    :vartype message: str
+    :ivar exception: Message of the exception that triggers the error response. Required.
+    :vartype exception: str
+    """
+
+    _validation = {
+        "request_id": {"required": True},
+        "response_date_time": {"required": True},
+        "code": {"required": True},
+        "message": {"required": True},
+        "exception": {"required": True},
+    }
+
+    _attribute_map = {
+        "request_id": {"key": "RequestId", "type": "str"},
+        "response_date_time": {"key": "ResponseDateTime", "type": "str"},
+        "code": {"key": "Code", "type": "str"},
+        "message": {"key": "Message", "type": "str"},
+        "exception": {"key": "Exception", "type": "str"},
+    }
+
+    def __init__(
+        self,
+        *,
+        request_id: str = "00000000-0000-0000-0000-000000000000",
+        response_date_time: str,
+        code: str,
+        message: str,
+        exception: str,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword request_id: A globally unique identifier to identify the diagnostic context. It
+         defaults to the empty GUID.
+        :paramtype request_id: str
+        :keyword response_date_time: Service error response date time. Required.
+        :paramtype response_date_time: str
+        :keyword code: Error code. Required.
+        :paramtype code: str
+        :keyword message: Error message. Required.
+        :paramtype message: str
+        :keyword exception: Message of the exception that triggers the error response. Required.
+        :paramtype exception: str
+        """
+        super().__init__(**kwargs)
+        self.request_id = request_id
+        self.response_date_time = response_date_time
+        self.code = code
+        self.message = message
+        self.exception = exception
+
+
+class Trace(DocumentIngress):
+    """Trace document type.
+
+    All required parameters must be populated in order to send to server.
+
+    :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+     'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+     "Event", "Trace", and "Unknown".
+    :vartype document_type: str or ~quickpulse_client.models.DocumentType
+    :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+     documents customized by UX customers.
+    :vartype document_stream_ids: list[str]
+    :ivar properties: Collection of custom properties.
+    :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+    :ivar message: Trace message.
+    :vartype message: str
+    """
+
+    _validation = {
+        "document_type": {"required": True},
+        "message": {"max_length": 32768},
+    }
+
+    _attribute_map = {
+        "document_type": {"key": "DocumentType", "type": "str"},
+        "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+        "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+        "message": {"key": "Message", "type": "str"},
+    }
+
+    def __init__(
+        self,
+        *,
+        document_stream_ids: Optional[List[str]] = None,
+        properties: Optional[List["_models.KeyValuePairString"]] = None,
+        message: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+         documents customized by UX customers.
+        :paramtype document_stream_ids: list[str]
+        :keyword properties: Collection of custom properties.
+        :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+        :keyword message: Trace message.
+        :paramtype message: str
+        """
+        super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+        self.document_type: str = "Trace"
+        self.message = message
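+
+
+# Illustrative sketch (placeholder values): each document subclass pins its own
+# document_type, so only the payload fields are passed in.
+#
+#     req = Request(
+#         name="GET /values/{id}",
+#         url="https://example.com/values/42",
+#         response_code="200",
+#         duration="PT0.123S",  # ISO 8601 duration
+#     )
+#     trace = Trace(message="cache refreshed")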
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = []  # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+    """Do not remove from this file.
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/py.typed b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/py.typed
new file mode 100644
index 00000000..e5aff4f8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_live_metrics.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_live_metrics.py
new file mode 100644
index 00000000..7ce7874f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_live_metrics.py
@@ -0,0 +1,306 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# cSpell:disable
+from typing import Any, Dict, List, Optional
+
+import logging
+import platform
+import psutil
+
+from opentelemetry.sdk._logs import LogData
+from opentelemetry.sdk.metrics import MeterProvider
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
+from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.trace import SpanKind
+
+from azure.monitor.opentelemetry.exporter._generated.models import ContextTagKeys
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+    _COMMITTED_BYTES_NAME,
+    _DEPENDENCY_DURATION_NAME,
+    _DEPENDENCY_FAILURE_RATE_NAME,
+    _DEPENDENCY_RATE_NAME,
+    _EXCEPTION_RATE_NAME,
+    _PROCESS_PHYSICAL_BYTES_NAME,
+    _PROCESS_TIME_NORMALIZED_NAME,
+    _PROCESSOR_TIME_NAME,
+    _REQUEST_DURATION_NAME,
+    _REQUEST_FAILURE_RATE_NAME,
+    _REQUEST_RATE_NAME,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._cpu import (
+    _get_process_memory,
+    _get_process_time_normalized,
+    _get_process_time_normalized_old,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._exporter import (
+    _QuickpulseExporter,
+    _QuickpulseMetricReader,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._filter import (
+    _check_filters,
+    _check_metric_filters,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+    DerivedMetricInfo,
+    FilterConjunctionGroupInfo,
+    MonitoringDataPoint,
+    TelemetryType,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._projection import (
+    _create_projections,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+    _QuickpulseState,
+    _is_post_state,
+    _append_quickpulse_document,
+    _get_quickpulse_derived_metric_infos,
+    _get_quickpulse_doc_stream_infos,
+    _set_global_quickpulse_state,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+    _DependencyData,
+    _ExceptionData,
+    _RequestData,
+    _TelemetryData,
+    _TraceData,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._utils import (
+    _get_log_record_document,
+    _get_span_document,
+)
+from azure.monitor.opentelemetry.exporter.statsbeat._state import (
+    set_statsbeat_live_metrics_feature_set,
+)
+from azure.monitor.opentelemetry.exporter._utils import (
+    _get_sdk_version,
+    _is_on_app_service,
+    _populate_part_a_fields,
+    Singleton,
+)
+
+_logger = logging.getLogger(__name__)
+
+
+PROCESS = psutil.Process()
+NUM_CPUS = psutil.cpu_count()
+
+
+def enable_live_metrics(**kwargs: Any) -> None:  # pylint: disable=C4758
+    """Live metrics entry point.
+
+    :keyword str connection_string: The connection string used for your Application Insights resource.
+    :keyword Resource resource: The OpenTelemetry Resource used for this Python application.
+    :keyword TokenCredential credential: Token credential, such as ManagedIdentityCredential or
+        ClientSecretCredential, used for Azure Active Directory (AAD) authentication. Defaults to None.
+    :rtype: None
+    """
+    _QuickpulseManager(**kwargs)
+    # We can detect feature usage for statsbeat since we are currently in an opt-in model
+    # Once we move to live metrics on-by-default, we will have to check for both explicit usage
+    # and whether the user is actually using live metrics (being on the live metrics blade in the UX)
+    set_statsbeat_live_metrics_feature_set()
+
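+# A minimal usage sketch (the connection string below is a placeholder; in
+# practice it comes from your Application Insights resource):
+#
+#     enable_live_metrics(
+#         connection_string="InstrumentationKey=00000000-0000-0000-0000-000000000000",
+#     )
+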
+
+# pylint: disable=protected-access,too-many-instance-attributes
+class _QuickpulseManager(metaclass=Singleton):
+
+    def __init__(self, **kwargs: Any) -> None:
+        _set_global_quickpulse_state(_QuickpulseState.PING_SHORT)
+        self._exporter = _QuickpulseExporter(**kwargs)
+        resource = kwargs.get("resource")
+        if not resource:
+            resource = Resource.create({})
+        part_a_fields = _populate_part_a_fields(resource)
+        id_generator = RandomIdGenerator()
+        self._base_monitoring_data_point = MonitoringDataPoint(
+            version=_get_sdk_version(),
+            # Invariant version 5 indicates filtering is supported
+            invariant_version=5,
+            instance=part_a_fields.get(ContextTagKeys.AI_CLOUD_ROLE_INSTANCE, ""),
+            role_name=part_a_fields.get(ContextTagKeys.AI_CLOUD_ROLE, ""),
+            machine_name=platform.node(),
+            stream_id=str(id_generator.generate_trace_id()),
+            is_web_app=_is_on_app_service(),
+            performance_collection_supported=True,
+        )
+        self._reader = _QuickpulseMetricReader(self._exporter, self._base_monitoring_data_point)
+        self._meter_provider = MeterProvider(
+            metric_readers=[self._reader],
+            resource=resource,
+        )
+        self._meter = self._meter_provider.get_meter("azure_monitor_live_metrics")
+
+        self._request_duration = self._meter.create_histogram(
+            _REQUEST_DURATION_NAME[0], "ms", "live metrics avg request duration in ms"
+        )
+        self._dependency_duration = self._meter.create_histogram(
+            _DEPENDENCY_DURATION_NAME[0], "ms", "live metrics avg dependency duration in ms"
+        )
+        # We use a counter to represent rates per second: the collection
+        # interval is one second, so the count of requests within the
+        # interval equals the per-second rate
+        self._request_rate_counter = self._meter.create_counter(
+            _REQUEST_RATE_NAME[0], "req/sec", "live metrics request rate per second"
+        )
+        self._request_failed_rate_counter = self._meter.create_counter(
+            _REQUEST_FAILURE_RATE_NAME[0], "req/sec", "live metrics request failed rate per second"
+        )
+        self._dependency_rate_counter = self._meter.create_counter(
+            _DEPENDENCY_RATE_NAME[0], "dep/sec", "live metrics dependency rate per second"
+        )
+        self._dependency_failure_rate_counter = self._meter.create_counter(
+            _DEPENDENCY_FAILURE_RATE_NAME[0], "dep/sec", "live metrics dependency failure rate per second"
+        )
+        self._exception_rate_counter = self._meter.create_counter(
+            _EXCEPTION_RATE_NAME[0], "exc/sec", "live metrics exception rate per second"
+        )
+        self._process_memory_gauge_old = self._meter.create_observable_gauge(
+            _COMMITTED_BYTES_NAME[0],
+            [_get_process_memory],
+        )
+        self._process_memory_gauge = self._meter.create_observable_gauge(
+            _PROCESS_PHYSICAL_BYTES_NAME[0],
+            [_get_process_memory],
+        )
+        self._process_time_gauge_old = self._meter.create_observable_gauge(
+            _PROCESSOR_TIME_NAME[0],
+            [_get_process_time_normalized_old],
+        )
+        self._process_time_gauge = self._meter.create_observable_gauge(
+            _PROCESS_TIME_NORMALIZED_NAME[0],
+            [_get_process_time_normalized],
+        )
+
+    def _record_span(self, span: ReadableSpan) -> None:
+        # Only record if in post state
+        if _is_post_state():
+            try:
+                duration_ms = 0
+                if span.end_time and span.start_time:
+                    # Span timestamps are in ns; divide by 1e6 so the value is in ms,
+                    # matching the duration histograms declared with unit "ms"
+                    duration_ms = (span.end_time - span.start_time) / 1e6  # type: ignore
+                # TODO: Spec out what "success" is
+                success = span.status.is_ok
+
+                if span.kind in (SpanKind.SERVER, SpanKind.CONSUMER):
+                    if success:
+                        self._request_rate_counter.add(1)
+                    else:
+                        self._request_failed_rate_counter.add(1)
+                    self._request_duration.record(duration_ms)
+                else:
+                    if success:
+                        self._dependency_rate_counter.add(1)
+                    else:
+                        self._dependency_failure_rate_counter.add(1)
+                    self._dependency_duration.record(duration_ms)
+
+                # Derive metrics for quickpulse filtering
+                data = _TelemetryData._from_span(span)
+                _derive_metrics_from_telemetry_data(data)
+
+                # Process docs for quickpulse filtering
+                _apply_document_filters_from_telemetry_data(data)
+
+                # Derive exception metrics from span events
+                if span.events:
+                    for event in span.events:
+                        if event.name == "exception":
+                            self._exception_rate_counter.add(1)
+                            # Derive metrics for quickpulse filtering for exception
+                            exc_data = _ExceptionData._from_span_event(event)
+                            _derive_metrics_from_telemetry_data(exc_data)
+                            # Process docs for quickpulse filtering for exception
+                            _apply_document_filters_from_telemetry_data(exc_data)
+            except Exception:  # pylint: disable=broad-except
+                _logger.exception("Exception occurred while recording span.")
+
+    def _record_log_record(self, log_data: LogData) -> None:
+        # Only record if in post state
+        if _is_post_state():
+            try:
+                if log_data.log_record:
+                    exc_type = None
+                    log_record = log_data.log_record
+                    if log_record.attributes:
+                        exc_type = log_record.attributes.get(SpanAttributes.EXCEPTION_TYPE)
+                        exc_message = log_record.attributes.get(SpanAttributes.EXCEPTION_MESSAGE)
+                        if exc_type is not None or exc_message is not None:
+                            self._exception_rate_counter.add(1)
+
+                    # Derive metrics for quickpulse filtering
+                    data = _TelemetryData._from_log_record(log_record)
+                    _derive_metrics_from_telemetry_data(data)
+
+                    # Process docs for quickpulse filtering
+                    _apply_document_filters_from_telemetry_data(data, exc_type)  # type: ignore
+            except Exception:  # pylint: disable=broad-except
+                _logger.exception("Exception occurred while recording log record.")
+
+
+# Filtering
+
+# Called by record_span/record_log when processing a span/log_record for metrics filtering
+# Derives metrics from projections if applicable to current filters in config
+def _derive_metrics_from_telemetry_data(data: _TelemetryData):
+    metric_infos_dict: Dict[TelemetryType, List[DerivedMetricInfo]] = _get_quickpulse_derived_metric_infos()
+    # if empty, filtering was not configured
+    if not metric_infos_dict:
+        return
+    metric_infos = []  # type: ignore
+    if isinstance(data, _RequestData):
+        metric_infos = metric_infos_dict.get(TelemetryType.REQUEST)  # type: ignore
+    elif isinstance(data, _DependencyData):
+        metric_infos = metric_infos_dict.get(TelemetryType.DEPENDENCY)  # type: ignore
+    elif isinstance(data, _ExceptionData):
+        metric_infos = metric_infos_dict.get(TelemetryType.EXCEPTION)  # type: ignore
+    elif isinstance(data, _TraceData):
+        metric_infos = metric_infos_dict.get(TelemetryType.TRACE)  # type: ignore
+    if metric_infos and _check_metric_filters(metric_infos, data):
+        # Since this data matches the filter, create projections used to
+        # generate filtered metrics
+        _create_projections(metric_infos, data)
+
+
+# Called by record_span/record_log when processing a span/log_record for docs filtering
+# Finds doc stream Ids and their doc filter configurations
+def _apply_document_filters_from_telemetry_data(data: _TelemetryData, exc_type: Optional[str] = None):
+    doc_config_dict: Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]] = _get_quickpulse_doc_stream_infos()  # pylint: disable=C0301
+    stream_ids = set()
+    doc_config = {}  # type: ignore
+    if isinstance(data, _RequestData):
+        doc_config = doc_config_dict.get(TelemetryType.REQUEST, {})  # type: ignore
+    elif isinstance(data, _DependencyData):
+        doc_config = doc_config_dict.get(TelemetryType.DEPENDENCY, {})  # type: ignore
+    elif isinstance(data, _ExceptionData):
+        doc_config = doc_config_dict.get(TelemetryType.EXCEPTION, {})  # type: ignore
+    elif isinstance(data, _TraceData):
+        doc_config = doc_config_dict.get(TelemetryType.TRACE, {})  # type: ignore
+    for stream_id, filter_groups in doc_config.items():
+        for filter_group in filter_groups:
+            if _check_filters(filter_group.filters, data):
+                stream_ids.add(stream_id)
+                break
+
+    # We only append and send the document if either:
+    # 1. The document matched the filtering for a specific streamId
+    # 2. Filtering was not enabled for this telemetry type (empty doc_config)
+    if len(stream_ids) > 0 or not doc_config:
+        if type(data) in (_DependencyData, _RequestData):
+            document = _get_span_document(data)  # type: ignore
+        else:
+            document = _get_log_record_document(data, exc_type)  # type: ignore
+        # A stream (with a unique streamId) is relevant if there are multiple sources sending to the same
+        # Application Insights instance with live metrics enabled
+        # Modify the document's streamIds to determine which stream to send to in post
+        # Note that the default case is that the list of document_stream_ids is empty, in which
+        # case no filtering is done for the telemetry type and it is sent to all streams
+        if stream_ids:
+            document.document_stream_ids = list(stream_ids)
+
+        # Add the generated document to be sent to quickpulse
+        _append_quickpulse_document(document)
+
+# cSpell:enable
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_policy.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_policy.py
new file mode 100644
index 00000000..b0d84aea
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_policy.py
@@ -0,0 +1,36 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import Any, Optional
+from urllib.parse import urlparse
+from weakref import ReferenceType
+
+from azure.core.pipeline import PipelineResponse, policies
+
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import _QUICKPULSE_REDIRECT_HEADER_NAME
+from azure.monitor.opentelemetry.exporter._quickpulse._generated import QuickpulseClient
+
+
+# Quickpulse endpoint handles redirects via header instead of status codes
+# We use a custom RedirectPolicy to handle this use case
+# pylint: disable=protected-access
+class _QuickpulseRedirectPolicy(policies.RedirectPolicy):
+
+    def __init__(self, **kwargs: Any) -> None:
+        # Weakref to QuickPulseClient instance
+        self._qp_client_ref: Optional[ReferenceType[QuickpulseClient]] = None
+        super().__init__(**kwargs)
+
+    # Gets the redirect location from header
+    def get_redirect_location(self, response: PipelineResponse) -> Optional[str]:
+        redirect_location = response.http_response.headers.get(_QUICKPULSE_REDIRECT_HEADER_NAME)
+        qp_client = None
+        if redirect_location:
+            redirected_url = urlparse(redirect_location)
+            if redirected_url.scheme and redirected_url.netloc:
+                if self._qp_client_ref:
+                    qp_client = self._qp_client_ref()
+                if qp_client and qp_client._client:
+                    # Set new endpoint to redirect location
+                    qp_client._client._base_url = f"{redirected_url.scheme}://{redirected_url.netloc}"
+        return redirect_location  # type: ignore
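+
+
+# Hedged wiring sketch: the exporter is expected to hand this policy to the
+# generated client and point the weakref back at it. redirect_policy is the
+# standard azure-core pipeline hook; the client kwarg names and endpoint below
+# are assumptions for illustration only.
+#
+#     import weakref
+#
+#     policy = _QuickpulseRedirectPolicy()
+#     client = QuickpulseClient(
+#         host="https://eastus.livediagnostics.monitor.azure.com",  # placeholder
+#         redirect_policy=policy,
+#     )
+#     policy._qp_client_ref = weakref.ref(client)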
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_processor.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_processor.py
new file mode 100644
index 00000000..0bcc69ec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_processor.py
@@ -0,0 +1,33 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from opentelemetry.sdk._logs import LogData, LogRecordProcessor
+from opentelemetry.sdk.trace import ReadableSpan, SpanProcessor
+
+from azure.monitor.opentelemetry.exporter._quickpulse._live_metrics import _QuickpulseManager
+
+
+# pylint: disable=protected-access
+class _QuickpulseLogRecordProcessor(LogRecordProcessor):
+
+    def emit(self, log_data: LogData) -> None:  # type: ignore
+        qpm = _QuickpulseManager._instance
+        if qpm:
+            qpm._record_log_record(log_data)
+        super().emit(log_data)  # type: ignore[safe-super]
+
+    def shutdown(self):
+        pass
+
+    def force_flush(self, timeout_millis: int = 30000):
+        super().force_flush(timeout_millis=timeout_millis)  # type: ignore[safe-super]
+
+
+# pylint: disable=protected-access
+class _QuickpulseSpanProcessor(SpanProcessor):
+
+    def on_end(self, span: ReadableSpan) -> None:
+        qpm = _QuickpulseManager._instance
+        if qpm:
+            qpm._record_span(span)
+        return super().on_end(span)
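+
+
+# Hedged wiring sketch: these processors just forward to the _QuickpulseManager
+# singleton, so they attach to the standard OpenTelemetry SDK providers like
+# any other processor:
+#
+#     from opentelemetry.sdk.trace import TracerProvider
+#
+#     provider = TracerProvider()
+#     provider.add_span_processor(_QuickpulseSpanProcessor())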
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_projection.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_projection.py
new file mode 100644
index 00000000..998e422a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_projection.py
@@ -0,0 +1,98 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from typing import List, Optional, Tuple
+
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+    _QUICKPULSE_PROJECTION_COUNT,
+    _QUICKPULSE_PROJECTION_CUSTOM,
+    _QUICKPULSE_PROJECTION_DURATION,
+    _QUICKPULSE_PROJECTION_MAX_VALUE,
+    _QUICKPULSE_PROJECTION_MIN_VALUE,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+    AggregationType,
+    DerivedMetricInfo,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+    _get_quickpulse_projection_map,
+    _set_quickpulse_projection_map,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+    _DependencyData,
+    _RequestData,
+    _TelemetryData,
+)
+
+
+# Initialize metric projections per DerivedMetricInfo
+def _init_derived_metric_projection(filter_info: DerivedMetricInfo):
+    derived_metric_agg_value = 0
+    if filter_info.aggregation == AggregationType.MIN:
+        derived_metric_agg_value = _QUICKPULSE_PROJECTION_MAX_VALUE
+    elif filter_info.aggregation == AggregationType.MAX:
+        derived_metric_agg_value = _QUICKPULSE_PROJECTION_MIN_VALUE
+    elif filter_info.aggregation == AggregationType.SUM:
+        derived_metric_agg_value = 0
+    elif filter_info.aggregation == AggregationType.AVG:
+        derived_metric_agg_value = 0
+    _set_quickpulse_projection_map(
+        filter_info.id,
+        AggregationType(filter_info.aggregation),
+        derived_metric_agg_value,
+        0,
+    )
+
+
+# Create projections based on DerivedMetricInfos and the current data being processed
+def _create_projections(metric_infos: List[DerivedMetricInfo], data: _TelemetryData):
+    for metric_info in metric_infos:
+        value = 0
+        if metric_info.projection == _QUICKPULSE_PROJECTION_COUNT:
+            value = 1
+        elif metric_info.projection == _QUICKPULSE_PROJECTION_DURATION:
+            if isinstance(data, (_DependencyData, _RequestData)):
+                value = data.duration  # type: ignore
+            else:
+                # Duration only supported for Dependency and Requests
+                continue
+        elif metric_info.projection.startswith(_QUICKPULSE_PROJECTION_CUSTOM):
+            key = metric_info.projection.split(_QUICKPULSE_PROJECTION_CUSTOM, 1)[1].strip()
+            dim_value = data.custom_dimensions.get(key, 0)
+            if dim_value is None:
+                continue
+            try:
+                value = float(dim_value)  # type: ignore
+            except ValueError:
+                continue
+        else:
+            continue
+
+        aggregate: Optional[Tuple[float, int]] = _calculate_aggregation(
+            AggregationType(metric_info.aggregation),
+            metric_info.id,
+            value,
+        )
+        if aggregate:
+            _set_quickpulse_projection_map(
+                metric_info.id,
+                AggregationType(metric_info.aggregation),
+                aggregate[0],
+                aggregate[1],
+            )
+
+
+# Calculate the aggregation based on the previous projection value and the aggregation type
+# of a specific metric filter. Return type is a Tuple of (value, count)
+def _calculate_aggregation(aggregation: AggregationType, id: str, value: float) -> Optional[Tuple[float, int]]:
+    projection: Optional[Tuple[AggregationType, float, int]] = _get_quickpulse_projection_map().get(id)
+    if projection:
+        prev_value = projection[1]
+        prev_count = projection[2]
+        if aggregation == AggregationType.SUM:
+            return (prev_value + value, prev_count + 1)
+        if aggregation == AggregationType.MIN:
+            return (min(prev_value, value), prev_count + 1)
+        if aggregation == AggregationType.MAX:
+            return (max(prev_value, value), prev_count + 1)
+        return (prev_value + value, prev_count + 1)
+    return None
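+
+
+# Worked example (illustrative values): with an existing projection of
+# (AggregationType.SUM, 10.0, 4) stored for metric id "m1",
+# _calculate_aggregation(AggregationType.SUM, "m1", 2.5) returns (12.5, 5).
+# AVG falls through to the same sum-and-count accumulation and is presumably
+# resolved into an average when the projections are exported.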
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_state.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_state.py
new file mode 100644
index 00000000..f19bccda
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_state.py
@@ -0,0 +1,190 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from datetime import datetime
+from enum import Enum
+from typing import Dict, List, Tuple
+
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+    _LONG_PING_INTERVAL_SECONDS,
+    _POST_INTERVAL_SECONDS,
+    _QUICKPULSE_PROJECTION_MAX_VALUE,
+    _QUICKPULSE_PROJECTION_MIN_VALUE,
+    _SHORT_PING_INTERVAL_SECONDS,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+    AggregationType,
+    DerivedMetricInfo,
+    DocumentIngress,
+    FilterConjunctionGroupInfo,
+    TelemetryType,
+)
+
+
+class _QuickpulseState(Enum):
+    """Current state of quickpulse service.
+    The numerical value represents the ping/post interval in seconds for those states.
+    """
+
+    OFFLINE = 0
+    PING_SHORT = _SHORT_PING_INTERVAL_SECONDS
+    PING_LONG = _LONG_PING_INTERVAL_SECONDS
+    POST_SHORT = _POST_INTERVAL_SECONDS
+
+
+_GLOBAL_QUICKPULSE_STATE = _QuickpulseState.OFFLINE
+_QUICKPULSE_DOCUMENTS: List[DocumentIngress] = []
+_QUICKPULSE_LAST_PROCESS_TIME = 0.0
+_QUICKPULSE_PROCESS_ELAPSED_TIME = datetime.now()
+_QUICKPULSE_LAST_PROCESS_CPU = 0.0
+# Filtering
+_QUICKPULSE_ETAG = ""
+_QUICKPULSE_DERIVED_METRIC_INFOS: Dict[TelemetryType, List[DerivedMetricInfo]] = {}
+_QUICKPULSE_PROJECTION_MAP: Dict[str, Tuple[AggregationType, float, int]] = {}
+_QUICKPULSE_DOC_STREAM_INFOS: Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]] = {}
+
+
+def _set_global_quickpulse_state(state: _QuickpulseState) -> None:
+    # pylint: disable=global-statement
+    global _GLOBAL_QUICKPULSE_STATE
+    _GLOBAL_QUICKPULSE_STATE = state
+
+
+def _get_global_quickpulse_state() -> _QuickpulseState:
+    return _GLOBAL_QUICKPULSE_STATE
+
+
+def _set_quickpulse_last_process_time(time: float) -> None:
+    # pylint: disable=global-statement
+    global _QUICKPULSE_LAST_PROCESS_TIME
+    _QUICKPULSE_LAST_PROCESS_TIME = time
+
+
+def _get_quickpulse_last_process_time() -> float:
+    return _QUICKPULSE_LAST_PROCESS_TIME
+
+
+def _set_quickpulse_process_elapsed_time(time: datetime) -> None:
+    # pylint: disable=global-statement
+    global _QUICKPULSE_PROCESS_ELAPSED_TIME
+    _QUICKPULSE_PROCESS_ELAPSED_TIME = time
+
+
+def _get_quickpulse_process_elapsed_time() -> datetime:
+    return _QUICKPULSE_PROCESS_ELAPSED_TIME
+
+
+def _set_quickpulse_last_process_cpu(time: float) -> None:
+    # pylint: disable=global-statement
+    global _QUICKPULSE_LAST_PROCESS_CPU
+    _QUICKPULSE_LAST_PROCESS_CPU = time
+
+
+def _get_quickpulse_last_process_cpu() -> float:
+    return _QUICKPULSE_LAST_PROCESS_CPU
+
+
+def is_quickpulse_enabled() -> bool:
+    return _get_global_quickpulse_state() is not _QuickpulseState.OFFLINE
+
+
+def _is_ping_state() -> bool:
+    return _get_global_quickpulse_state() in (_QuickpulseState.PING_SHORT, _QuickpulseState.PING_LONG)
+
+
+def _is_post_state() -> bool:
+    return _get_global_quickpulse_state() is _QuickpulseState.POST_SHORT
+
+
+def _append_quickpulse_document(document: DocumentIngress):
+    # pylint: disable=global-variable-not-assigned
+    global _QUICKPULSE_DOCUMENTS
+    # Limit risk of memory leak by limiting doc length to something manageable
+    if len(_QUICKPULSE_DOCUMENTS) > 20:
+        try:
+            _QUICKPULSE_DOCUMENTS.pop(0)
+        except IndexError:
+            pass
+    _QUICKPULSE_DOCUMENTS.append(document)
+
+
+def _get_and_clear_quickpulse_documents() -> List[DocumentIngress]:
+    # pylint: disable=global-statement
+    global _QUICKPULSE_DOCUMENTS
+    documents = list(_QUICKPULSE_DOCUMENTS)
+    _QUICKPULSE_DOCUMENTS = []
+    return documents
+
+
+# Filtering
+
+
+# Used for etag configuration
+def _set_quickpulse_etag(etag: str) -> None:
+    # pylint: disable=global-statement
+    global _QUICKPULSE_ETAG
+    _QUICKPULSE_ETAG = etag
+
+
+def _get_quickpulse_etag() -> str:
+    return _QUICKPULSE_ETAG
+
+
+# Used for updating the metric filter configuration when the etag has changed
+# Contains the filters and projections of metrics to apply for each telemetry type, if any exist
+def _set_quickpulse_derived_metric_infos(filters: Dict[TelemetryType, List[DerivedMetricInfo]]) -> None:
+    # pylint: disable=global-statement
+    global _QUICKPULSE_DERIVED_METRIC_INFOS
+    _QUICKPULSE_DERIVED_METRIC_INFOS = filters
+
+
+def _get_quickpulse_derived_metric_infos() -> Dict[TelemetryType, List[DerivedMetricInfo]]:
+    return _QUICKPULSE_DERIVED_METRIC_INFOS
+
+
+# Used for initializing and setting projections when span/logs are recorded
+def _set_quickpulse_projection_map(metric_id: str, aggregation_type: AggregationType, value: float, count: int):
+    # pylint: disable=global-variable-not-assigned
+    global _QUICKPULSE_PROJECTION_MAP
+    _QUICKPULSE_PROJECTION_MAP[metric_id] = (aggregation_type, value, count)
+
+
+def _get_quickpulse_projection_map() -> Dict[str, Tuple[AggregationType, float, int]]:
+    return _QUICKPULSE_PROJECTION_MAP
+
+
+# Resets projections per derived metric info for the next quickpulse interval
+# Called once processing/export of the previous interval's projections has finished
+def _reset_quickpulse_projection_map() -> None:
+    # pylint: disable=global-statement
+    global _QUICKPULSE_PROJECTION_MAP
+    new_map = {}
+    if _QUICKPULSE_PROJECTION_MAP:
+        for metric_id, projection in _QUICKPULSE_PROJECTION_MAP.items():
+            value = 0
+            if projection[0] == AggregationType.MIN:
+                value = _QUICKPULSE_PROJECTION_MAX_VALUE
+            elif projection[0] == AggregationType.MAX:
+                value = _QUICKPULSE_PROJECTION_MIN_VALUE
+            new_map[metric_id] = (projection[0], value, 0)
+        _QUICKPULSE_PROJECTION_MAP.clear()
+        _QUICKPULSE_PROJECTION_MAP = new_map
+
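+# For example (illustrative values), a MIN projection resets to the sentinel
+# max so the next interval's first observation always wins the comparison:
+#
+#     _set_quickpulse_projection_map("m1", AggregationType.MIN, 42.0, 1)
+#     _reset_quickpulse_projection_map()
+#     # map is now {"m1": (AggregationType.MIN, _QUICKPULSE_PROJECTION_MAX_VALUE, 0)}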
+
+# Clears the projection map; usually called when the filter configuration changes
+def _clear_quickpulse_projection_map() -> None:
+    # pylint: disable=global-variable-not-assigned
+    global _QUICKPULSE_PROJECTION_MAP
+    _QUICKPULSE_PROJECTION_MAP.clear()
+
+
+# Used for updating the doc filter configuration when the etag has changed
+# Maps each telemetry type to the document filters to apply per stream, if any
+# Format is Dict[TelemetryType, Dict[stream.id, List[FilterConjunctionGroupInfo]]]
+def _set_quickpulse_doc_stream_infos(filters: Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]]) -> None:
+    # pylint: disable=global-statement
+    global _QUICKPULSE_DOC_STREAM_INFOS
+    _QUICKPULSE_DOC_STREAM_INFOS = filters
+
+
+def _get_quickpulse_doc_stream_infos() -> Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]]:
+    return _QUICKPULSE_DOC_STREAM_INFOS
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_types.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_types.py
new file mode 100644
index 00000000..810c3f91
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_types.py
@@ -0,0 +1,235 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# pylint: disable=protected-access
+from dataclasses import dataclass, fields
+from typing import Dict, no_type_check
+
+from opentelemetry.sdk._logs import LogRecord
+from opentelemetry.sdk.trace import Event, ReadableSpan
+from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
+from opentelemetry.semconv.attributes.http_attributes import (
+    HTTP_REQUEST_METHOD,
+    HTTP_RESPONSE_STATUS_CODE,
+)
+from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.trace import SpanKind
+
+from azure.monitor.opentelemetry.exporter.export.trace import _utils as trace_utils
+
+
+@dataclass
+class _TelemetryData:
+    custom_dimensions: Dict[str, str]
+
+    @staticmethod
+    def _from_span(span: ReadableSpan):
+        if span.kind in (SpanKind.SERVER, SpanKind.CONSUMER):
+            return _RequestData._from_span(span)
+        return _DependencyData._from_span(span)
+
+    @staticmethod
+    @no_type_check
+    def _from_log_record(log_record: LogRecord):
+        exc_type = log_record.attributes.get(SpanAttributes.EXCEPTION_TYPE)
+        exc_message = log_record.attributes.get(SpanAttributes.EXCEPTION_MESSAGE)
+        if exc_type is not None or exc_message is not None:
+            return _ExceptionData._from_log_record(log_record)
+        return _TraceData._from_log_record(log_record)
+
+
+@dataclass
+class _RequestData(_TelemetryData):
+    duration: float
+    success: bool
+    name: str
+    response_code: int
+    url: str
+
+    @staticmethod
+    @no_type_check
+    def _from_span(span: ReadableSpan):
+        # Logic should match that of exporter to Breeze
+        url = ""
+        duration_ms = 0
+        response_code = 0
+        success = True
+        attributes = {}
+        if span.end_time and span.start_time:
+            # Span timestamps are in nanoseconds; convert to milliseconds
+            duration_ms = (span.end_time - span.start_time) / 1e6
+        if span.attributes:
+            attributes = span.attributes
+            url = trace_utils._get_url_for_http_request(attributes)
+            status_code = attributes.get(HTTP_RESPONSE_STATUS_CODE) or \
+                attributes.get(SpanAttributes.HTTP_STATUS_CODE)
+            if status_code:
+                try:
+                    status_code = int(status_code)
+                except ValueError:
+                    status_code = 0
+            else:
+                status_code = 0
+            success = bool(span.status.is_ok and status_code and status_code not in range(400, 500))
+            response_code = status_code
+        return _RequestData(
+            duration=duration_ms,
+            success=success,
+            name=span.name,
+            response_code=response_code,
+            url=url or "",
+            custom_dimensions=attributes,
+        )
+
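+# For example (illustrative numbers), a SERVER span lasting 1.5e9 ns yields
+# duration=1500.0 ms above, which _utils._ms_to_iso8601_string later renders
+# as "P0Y0M0DT0H0M1.500S".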
+
+@dataclass
+class _DependencyData(_TelemetryData):
+    duration: float
+    success: bool
+    name: str
+    result_code: int
+    target: str
+    type: str
+    data: str
+
+    @staticmethod
+    @no_type_check
+    def _from_span(span: ReadableSpan):
+        # Logic should match that of exporter to Breeze
+        url = ""
+        duration_ms = 0
+        result_code = 0
+        attributes = {}
+        dependency_type = "InProc"
+        data = ""
+        target = ""
+        if span.end_time and span.start_time:
+            # Span timestamps are in nanoseconds; convert to milliseconds
+            duration_ms = (span.end_time - span.start_time) / 1e6
+        if span.attributes:
+            attributes = span.attributes
+            target = trace_utils._get_target_for_dependency_from_peer(attributes)
+            if span.kind is SpanKind.CLIENT:
+                if HTTP_REQUEST_METHOD in attributes or SpanAttributes.HTTP_METHOD in attributes:
+                    dependency_type = "HTTP"
+                    url = trace_utils._get_url_for_http_dependency(attributes)
+                    target, _ = trace_utils._get_target_and_path_for_http_dependency(
+                        attributes,
+                        url,
+                    )
+                    data = url
+                elif SpanAttributes.DB_SYSTEM in attributes:
+                    db_system = attributes[SpanAttributes.DB_SYSTEM]
+                    dependency_type = db_system
+                    target = trace_utils._get_target_for_db_dependency(
+                        target,
+                        db_system,
+                        attributes,
+                    )
+                    if SpanAttributes.DB_STATEMENT in attributes:
+                        data = attributes[SpanAttributes.DB_STATEMENT]
+                    elif SpanAttributes.DB_OPERATION in attributes:
+                        data = attributes[SpanAttributes.DB_OPERATION]
+                elif SpanAttributes.MESSAGING_SYSTEM in attributes:
+                    dependency_type = attributes[SpanAttributes.MESSAGING_SYSTEM]
+                    target = trace_utils._get_target_for_messaging_dependency(
+                        target,
+                        attributes,
+                    )
+                elif SpanAttributes.RPC_SYSTEM in attributes:
+                    dependency_type = attributes[SpanAttributes.RPC_SYSTEM]
+                    target = trace_utils._get_target_for_rpc_dependency(
+                        target,
+                        attributes,
+                    )
+                elif gen_ai_attributes.GEN_AI_SYSTEM in attributes:
+                    dependency_type = attributes[gen_ai_attributes.GEN_AI_SYSTEM]
+            elif span.kind is SpanKind.PRODUCER:
+                dependency_type = "Queue Message"
+                msg_system = attributes.get(SpanAttributes.MESSAGING_SYSTEM)
+                if msg_system:
+                    dependency_type += " | {}".format(msg_system)
+            else:
+                dependency_type = "InProc"
+
+        return _DependencyData(
+            duration=duration_ms,
+            success=span.status.is_ok,
+            name=span.name,
+            result_code=result_code,
+            target=target,
+            type=str(dependency_type),
+            data=data,
+            custom_dimensions=attributes,
+        )
+
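+# For example (hypothetical attributes), a CLIENT span carrying
+# {"db.system": "postgresql", "db.statement": "SELECT 1"} maps to
+# type="postgresql", data="SELECT 1", while a PRODUCER span carrying
+# {"messaging.system": "kafka"} maps to type="Queue Message | kafka".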
+
+@dataclass
+class _ExceptionData(_TelemetryData):
+    message: str
+    stack_trace: str
+
+    @staticmethod
+    @no_type_check
+    def _from_log_record(log_record: LogRecord):
+        return _ExceptionData(
+            message=str(log_record.attributes.get(SpanAttributes.EXCEPTION_MESSAGE, "")),
+            stack_trace=str(log_record.attributes.get(SpanAttributes.EXCEPTION_STACKTRACE, "")),
+            custom_dimensions=log_record.attributes,
+        )
+
+    @staticmethod
+    @no_type_check
+    def _from_span_event(span_event: Event):
+        return _ExceptionData(
+            message=str(span_event.attributes.get(SpanAttributes.EXCEPTION_MESSAGE, "")),
+            stack_trace=str(span_event.attributes.get(SpanAttributes.EXCEPTION_STACKTRACE, "")),
+            custom_dimensions=span_event.attributes,
+        )
+
+
+@dataclass
+class _TraceData(_TelemetryData):
+    message: str
+
+    @staticmethod
+    @no_type_check
+    def _from_log_record(log_record: LogRecord):
+        return _TraceData(
+            message=str(log_record.body),
+            custom_dimensions=log_record.attributes,
+        )
+
+
+def _get_field_names(data_type: type) -> Dict[str, str]:
+    field_map = {}
+    for field in fields(data_type):
+        field_map[field.name.replace("_", "").lower()] = field.name
+    return field_map
+
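+# For example, _get_field_names(_RequestData) yields
+# {"customdimensions": "custom_dimensions", "duration": "duration",
+#  "success": "success", "name": "name", "responsecode": "response_code",
+#  "url": "url"}, so service-side names like "ResponseCode" resolve to
+# dataclass fields case-insensitively.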
+
+_DEPENDENCY_DATA_FIELD_NAMES = _get_field_names(_DependencyData)
+_EXCEPTION_DATA_FIELD_NAMES = _get_field_names(_ExceptionData)
+_REQUEST_DATA_FIELD_NAMES = _get_field_names(_RequestData)
+_TRACE_DATA_FIELD_NAMES = _get_field_names(_TraceData)
+_DATA_FIELD_NAMES = {
+    _DependencyData: _DEPENDENCY_DATA_FIELD_NAMES,
+    _ExceptionData: _EXCEPTION_DATA_FIELD_NAMES,
+    _RequestData: _REQUEST_DATA_FIELD_NAMES,
+    _TraceData: _TRACE_DATA_FIELD_NAMES,
+}
+_KNOWN_STRING_FIELD_NAMES = (
+    "Url",
+    "Name",
+    "Target",
+    "Type",
+    "Data",
+    "Message",
+    "Exception.Message",
+    "Exception.StackTrace",
+)
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_utils.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_utils.py
new file mode 100644
index 00000000..fa6bcb9d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_utils.py
@@ -0,0 +1,179 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from datetime import datetime, timezone
+from typing import List, Optional, Tuple, Union
+
+from opentelemetry.sdk.metrics._internal.point import (
+    NumberDataPoint,
+    HistogramDataPoint,
+)
+from opentelemetry.sdk.metrics.export import MetricsData as OTMetricsData
+
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+    _QUICKPULSE_METRIC_NAME_MAPPINGS,
+    _QUICKPULSE_PROJECTION_MAX_VALUE,
+    _QUICKPULSE_PROJECTION_MIN_VALUE,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+    AggregationType,
+    DocumentIngress,
+    DocumentType,
+    Exception as ExceptionDocument,
+    MetricPoint,
+    MonitoringDataPoint,
+    RemoteDependency as RemoteDependencyDocument,
+    Request as RequestDocument,
+    Trace as TraceDocument,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+    _get_quickpulse_projection_map,
+    _reset_quickpulse_projection_map,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+    _DependencyData,
+    _ExceptionData,
+    _RequestData,
+    _TraceData,
+)
+
+
+def _metric_to_quick_pulse_data_points(  # pylint: disable=too-many-nested-blocks
+    metrics_data: OTMetricsData,
+    base_monitoring_data_point: MonitoringDataPoint,
+    documents: Optional[List[DocumentIngress]],
+) -> List[MonitoringDataPoint]:
+    metric_points = []
+    for resource_metric in metrics_data.resource_metrics:
+        for scope_metric in resource_metric.scope_metrics:
+            for metric in scope_metric.metrics:
+                for point in metric.data.data_points:
+                    if point is not None:
+                        value = 0
+                        if isinstance(point, HistogramDataPoint):
+                            if point.count > 0:
+                                value = point.sum / point.count
+                        elif isinstance(point, NumberDataPoint):
+                            value = point.value
+                        metric_point = MetricPoint(
+                            name=_QUICKPULSE_METRIC_NAME_MAPPINGS[metric.name.lower()],  # type: ignore
+                            weight=1,
+                            value=value,
+                        )
+                        metric_points.append(metric_point)
+    # Process filtered metrics
+    for metric in _get_metrics_from_projections():
+        metric_point = MetricPoint(
+            name=metric[0],  # type: ignore
+            weight=1,
+            value=metric[1],  # type: ignore
+        )
+        metric_points.append(metric_point)
+
+    # Reset projection map for next collection cycle
+    _reset_quickpulse_projection_map()
+
+    return [
+        MonitoringDataPoint(
+            version=base_monitoring_data_point.version,
+            invariant_version=base_monitoring_data_point.invariant_version,
+            instance=base_monitoring_data_point.instance,
+            role_name=base_monitoring_data_point.role_name,
+            machine_name=base_monitoring_data_point.machine_name,
+            stream_id=base_monitoring_data_point.stream_id,
+            is_web_app=base_monitoring_data_point.is_web_app,
+            performance_collection_supported=base_monitoring_data_point.performance_collection_supported,
+            timestamp=datetime.now(tz=timezone.utc),
+            metrics=metric_points,
+            documents=documents,
+        )
+    ]
+
+
+# mypy: disable-error-code="assignment,union-attr"
+def _get_span_document(data: Union[_DependencyData, _RequestData]) -> Union[RemoteDependencyDocument, RequestDocument]:
+    if isinstance(data, _DependencyData):
+        document = RemoteDependencyDocument(
+            document_type=DocumentType.REMOTE_DEPENDENCY,
+            name=data.name,
+            command_name=data.data,
+            result_code=str(data.result_code),
+            duration=_ms_to_iso8601_string(data.duration),
+        )
+    else:
+        document = RequestDocument(
+            document_type=DocumentType.REQUEST,
+            name=data.name,
+            url=data.url,
+            response_code=str(data.response_code),
+            duration=_ms_to_iso8601_string(data.duration),
+        )
+    return document
+
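+# Illustrative conversion (hypothetical values): a _RequestData with
+# name="GET /", url="http://localhost/", response_code=200 and duration=12.0
+# becomes a RequestDocument with duration "P0Y0M0DT0H0M0.012S".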
+
+# mypy: disable-error-code="assignment"
+def _get_log_record_document(data: Union[_ExceptionData, _TraceData], exc_type: Optional[str] = None) -> Union[ExceptionDocument, TraceDocument]:  # pylint: disable=C0301
+    if isinstance(data, _ExceptionData):
+        document = ExceptionDocument(
+            document_type=DocumentType.EXCEPTION,
+            exception_type=exc_type or "",
+            exception_message=data.message,
+        )
+    else:
+        document = TraceDocument(
+            document_type=DocumentType.TRACE,
+            message=data.message,
+        )
+    return document
+
+
+# Gets filtered metrics from projections to be exported
+# Called every second on export
+def _get_metrics_from_projections() -> List[Tuple[str, float]]:
+    metrics = []
+    projection_map = _get_quickpulse_projection_map()
+    for metric_id, projection in projection_map.items():
+        metric_value = 0
+        aggregation_type = projection[0]
+        if aggregation_type == AggregationType.MIN:
+            metric_value = 0 if projection[1] == _QUICKPULSE_PROJECTION_MAX_VALUE else projection[1]
+        elif aggregation_type == AggregationType.MAX:
+            metric_value = 0 if projection[1] == _QUICKPULSE_PROJECTION_MIN_VALUE else projection[1]
+        elif aggregation_type == AggregationType.AVG:
+            metric_value = 0 if projection[2] == 0 else projection[1] / float(projection[2])
+        elif aggregation_type == AggregationType.SUM:
+            metric_value = projection[1]
+        metrics.append((metric_id, metric_value))
+    return metrics  # type: ignore
+
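+# For example, an AVG projection stored as (AggregationType.AVG, 10.0, 4)
+# exports 10.0 / 4 = 2.5, while a MIN projection still holding the sentinel
+# _QUICKPULSE_PROJECTION_MAX_VALUE (i.e. never updated) exports 0.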
+
+# Time
+
+def _ms_to_iso8601_string(ms: float) -> str:
+    # Work in whole milliseconds so each component formats as an integer
+    seconds, millis = divmod(int(ms), 1000)
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+    years, days = divmod(days, 365)
+    months, days = divmod(days, 30)
+    duration = f"P{years}Y{months}M{days}DT{hours}H{minutes}M{seconds}.{millis:03d}S"
+    return duration
+
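+# For example, _ms_to_iso8601_string(90061001) returns "P0Y0M1DT1H1M1.001S"
+# (1 day, 1 hour, 1 minute, 1.001 seconds).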
+
+def _filter_time_stamp_to_ms(time_stamp: str) -> Optional[int]:
+    # The service side will return a timestamp in the following format:
+    # [days].[hours]:[minutes]:[seconds]
+    # the seconds may be a whole number or something like 7.89. 7.89 seconds translates to 7890 ms.
+    # examples: "14.6:56:7.89" = 1234567890 ms, "0.0:0:0.2" = 200 ms
+    total_milliseconds = None
+    try:
+        days_hours, minutes, seconds = time_stamp.split(":")
+        days, hours = map(float, days_hours.split("."))
+        total_milliseconds = int(
+            days * 24 * 60 * 60 * 1000  # days to milliseconds
+            + hours * 60 * 60 * 1000  # hours to milliseconds
+            + float(minutes) * 60 * 1000  # minutes to milliseconds
+            + float(seconds) * 1000  # seconds to milliseconds
+        )
+    except Exception:  # pylint: disable=broad-except
+        pass
+    return total_milliseconds
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_validate.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_validate.py
new file mode 100644
index 00000000..6d760117
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_validate.py
@@ -0,0 +1,139 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+    DerivedMetricInfo,
+    DocumentFilterConjunctionGroupInfo,
+    FilterInfo,
+    PredicateType,
+    TelemetryType,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+    _DEPENDENCY_DATA_FIELD_NAMES,
+    _KNOWN_STRING_FIELD_NAMES,
+    _REQUEST_DATA_FIELD_NAMES,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._utils import _filter_time_stamp_to_ms
+
+
+def _validate_derived_metric_info(metric_info: DerivedMetricInfo) -> bool:
+    if not _validate_telemetry_type(metric_info.telemetry_type):
+        return False
+    if not _validate_custom_metric_projection(metric_info):
+        return False
+    # Validate filters
+    for filter_group in metric_info.filter_groups:
+        for filter in filter_group.filters:
+            # Validate field names to telemetry type
+            # Validate predicate and comparands
+            if not _validate_filter_field_name(filter.field_name, metric_info.telemetry_type) or not \
+                _validate_filter_predicate_and_comparand(filter):
+                return False
+    return True
+
+
+def _validate_document_filter_group_info(doc_filter_group: DocumentFilterConjunctionGroupInfo) -> bool:
+    if not _validate_telemetry_type(doc_filter_group.telemetry_type):
+        return False
+    # Validate filters
+    for filter in doc_filter_group.filters.filters:
+        # Validate field names to telemetry type
+        # Validate predicate and comparands
+        if not _validate_filter_field_name(filter.field_name, doc_filter_group.telemetry_type) or not \
+            _validate_filter_predicate_and_comparand(filter):
+            return False
+    return True
+
+
+def _validate_telemetry_type(telemetry_type: str) -> bool:
+    # Validate telemetry type
+    try:
+        telemetry_type = TelemetryType(telemetry_type)
+    except Exception:  # pylint: disable=broad-except
+        return False
+    # Only REQUEST, DEPENDENCY, EXCEPTION, TRACE are supported
+    # No filtering options in UX for PERFORMANCE_COUNTERS
+    if telemetry_type not in (
+        TelemetryType.REQUEST,
+        TelemetryType.DEPENDENCY,
+        TelemetryType.EXCEPTION,
+        TelemetryType.TRACE,
+    ):
+        return False
+    return True
+
+
+def _validate_custom_metric_projection(metric_info: DerivedMetricInfo) -> bool:
+    # Projections over custom metrics ("CustomMetrics.*") are not supported
+    if metric_info.projection and metric_info.projection.startswith("CustomMetrics."):
+        return False
+    return True
+
+
+# pylint: disable=R0911
+def _validate_filter_field_name(name: str, telemetry_type: str) -> bool:
+    if not name:
+        return False
+    if name.startswith("CustomMetrics."):
+        return False
+    if name.startswith("CustomDimensions.") or name == "*":
+        return True
+    name = name.lower()
+    if telemetry_type == TelemetryType.DEPENDENCY.value:
+        if name not in _DEPENDENCY_DATA_FIELD_NAMES:
+            return False
+    elif telemetry_type == TelemetryType.REQUEST.value:
+        if name not in _REQUEST_DATA_FIELD_NAMES:
+            return False
+    elif telemetry_type == TelemetryType.EXCEPTION.value:
+        if name not in ("exception.message", "exception.stacktrace"):
+            return False
+    elif telemetry_type == TelemetryType.TRACE.value:
+        if name != "message":
+            return False
+    else:
+        return True
+    return True
+
+
+# pylint: disable=R0911
+def _validate_filter_predicate_and_comparand(filter: FilterInfo) -> bool:
+    name = filter.field_name
+    comparand = filter.comparand
+    # Validate predicate type
+    try:
+        predicate = PredicateType(filter.predicate)
+    except Exception:  # pylint: disable=broad-except
+        return False
+    if not comparand:
+        return False
+    if name == "*" and predicate not in (PredicateType.CONTAINS, PredicateType.DOES_NOT_CONTAIN):
+        return False
+    if name in ("ResultCode", "ResponseCode", "Duration"):
+        if predicate in (PredicateType.CONTAINS, PredicateType.DOES_NOT_CONTAIN):
+            return False
+        if name == "Duration":
+            # Duration comparand should be a string timestamp
+            if _filter_time_stamp_to_ms(comparand) is None:
+                return False
+        else:
+            try:
+                # Response/ResultCode comparand should be interpreted as integer
+                int(comparand)
+            except Exception:  # pylint: disable=broad-except
+                return False
+    elif name == "Success":
+        if predicate not in (PredicateType.EQUAL, PredicateType.NOT_EQUAL):
+            return False
+        comparand = comparand.lower()
+        if comparand not in ("true", "false"):
+            return False
+    elif name in _KNOWN_STRING_FIELD_NAMES or name.startswith("CustomDimensions."):
+        if predicate in (
+            PredicateType.GREATER_THAN,
+            PredicateType.GREATER_THAN_OR_EQUAL,
+            PredicateType.LESS_THAN,
+            PredicateType.LESS_THAN_OR_EQUAL,
+        ):
+            return False
+    return True
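+
+
+# Illustrative examples (hypothetical FilterInfo values): field_name="Duration"
+# with predicate=PredicateType.GREATER_THAN and comparand="0.0:0:1.5" validates
+# (the comparand parses to 1500 ms), while field_name="Success" with
+# comparand="yes" is rejected because only "true"/"false" are accepted.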