Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/__init__.py | 19
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_connection_string_parser.py | 121
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_constants.py | 216
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/__init__.py | 17
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_azure_monitor_client.py | 92
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_configuration.py | 58
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_patch.py | 32
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_vendor.py | 16
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/__init__.py | 17
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_azure_monitor_client.py | 74
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_configuration.py | 46
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_patch.py | 32
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/operations/__init__.py | 13
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/operations/_azure_monitor_client_operations.py | 102
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/__init__.py | 73
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_azure_monitor_client_enums.py | 60
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_models.py | 1167
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_models_py3.py | 1342
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/operations/__init__.py | 13
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/operations/_azure_monitor_client_operations.py | 140
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/py.typed | 1
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/__init__.py | 11
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_constants.py | 66
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_cpu.py | 63
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_exporter.py | 351
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_filter.py | 194
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/__init__.py | 23
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_client.py | 93
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_configuration.py | 59
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/__init__.py | 19
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_operations.py | 544
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_serialization.py | 1998
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_vendor.py | 26
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/__init__.py | 23
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_client.py | 95
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_configuration.py | 59
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/__init__.py | 19
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_operations.py | 464
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_vendor.py | 26
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/__init__.py | 65
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_enums.py | 111
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_models.py | 1123
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/py.typed | 1
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_live_metrics.py | 306
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_policy.py | 36
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_processor.py | 33
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_projection.py | 98
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_state.py | 190
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_types.py | 235
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_utils.py | 179
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_validate.py | 139
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_storage.py | 215
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_utils.py | 302
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_version.py | 8
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/__init__.py | 0
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/_base.py | 435
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/logs/__init__.py | 0
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/logs/_exporter.py | 244
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/metrics/__init__.py | 0
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/metrics/_exporter.py | 291
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/__init__.py | 0
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_exporter.py | 553
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_sampling.py | 98
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_utils.py | 321
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/py.typed | 1
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/__init__.py | 0
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_exporter.py | 29
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_state.py | 70
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_statsbeat.py | 77
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_statsbeat_metrics.py | 417
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_utils.py | 69
76 files changed, 13530 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/__init__.py
new file mode 100644
index 00000000..feaa0fec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+from azure.monitor.opentelemetry.exporter.export.logs._exporter import AzureMonitorLogExporter
+from azure.monitor.opentelemetry.exporter.export.metrics._exporter import AzureMonitorMetricExporter
+from azure.monitor.opentelemetry.exporter.export.trace._exporter import AzureMonitorTraceExporter
+from azure.monitor.opentelemetry.exporter.export.trace._sampling import ApplicationInsightsSampler
+from ._version import VERSION
+
+__all__ = [
+ "ApplicationInsightsSampler",
+ "AzureMonitorMetricExporter",
+ "AzureMonitorLogExporter",
+ "AzureMonitorTraceExporter",
+]
+__version__ = VERSION
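
These four names are the package's entire public surface. A minimal wiring sketch, assuming opentelemetry-sdk is installed and using a placeholder connection string (replace it with a real resource's value, or set APPLICATIONINSIGHTS_CONNECTION_STRING):

    # Minimal sketch: attach the trace exporter to the OpenTelemetry SDK pipeline.
    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import BatchSpanProcessor

    from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter

    exporter = AzureMonitorTraceExporter(
        connection_string="InstrumentationKey=00000000-0000-0000-0000-000000000000"
    )
    provider = TracerProvider()
    provider.add_span_processor(BatchSpanProcessor(exporter))
    trace.set_tracer_provider(provider)

    with trace.get_tracer(__name__).start_as_current_span("example"):
        pass  # spans are batched and exported to Azure Monitor on flush

The log and metric exporters attach to their respective SDK pipelines the same way.
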
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_connection_string_parser.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_connection_string_parser.py
new file mode 100644
index 00000000..51f9e60d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_connection_string_parser.py
@@ -0,0 +1,121 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+import re
+import typing
+
+LIVE_ENDPOINT = "liveendpoint"
+INGESTION_ENDPOINT = "ingestionendpoint"
+INSTRUMENTATION_KEY = "instrumentationkey"
+# cspell:disable-next-line
+AAD_AUDIENCE = "aadaudience"
+
+# Validate UUID format
+# Specs taken from https://tools.ietf.org/html/rfc4122
+uuid_regex_pattern = re.compile("^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$")
+
+
+class ConnectionStringParser:
+ """ConnectionString parser.
+
+ :param connection_string: Azure Connection String.
+ :type connection_string: str
+ :rtype: None
+ """
+
+ def __init__(self, connection_string: typing.Optional[str] = None) -> None:
+ self.instrumentation_key = None
+ self.endpoint = ""
+ self.live_endpoint = ""
+ self._connection_string = connection_string
+ self.aad_audience = ""
+ self._initialize()
+ self._validate_instrumentation_key()
+
+ def _initialize(self) -> None:
+ # connection string and ikey
+ code_cs = self._parse_connection_string(self._connection_string)
+ code_ikey = self.instrumentation_key
+ env_cs = self._parse_connection_string(os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING"))
+ env_ikey = os.getenv("APPINSIGHTS_INSTRUMENTATIONKEY")
+
+ # The priority of which value takes on the instrumentation key is:
+ # 1. Key from explicitly passed in connection string
+ # 2. Key from explicitly passed in instrumentation key
+ # 3. Key from connection string in environment variable
+ # 4. Key from instrumentation key in environment variable
+ self.instrumentation_key = (
+ code_cs.get(INSTRUMENTATION_KEY) or code_ikey or \
+ env_cs.get(INSTRUMENTATION_KEY) or env_ikey # type: ignore
+ )
+ # The priority of the endpoints is as follows:
+ # 1. The endpoint explicitly passed in connection string
+ # 2. The endpoint from the connection string in environment variable
+ # 3. The default breeze endpoint
+ self.endpoint = (
+ code_cs.get(INGESTION_ENDPOINT) or env_cs.get(INGESTION_ENDPOINT) or \
+ "https://dc.services.visualstudio.com"
+ )
+ self.live_endpoint = (
+ code_cs.get(LIVE_ENDPOINT) or env_cs.get(LIVE_ENDPOINT) or \
+ "https://rt.services.visualstudio.com"
+ )
+ # The AUDIENCE is a url that identifies Azure Monitor in a specific cloud
+ # (For example: "https://monitor.azure.com/").
+ self.aad_audience = (
+ code_cs.get(AAD_AUDIENCE) or env_cs.get(AAD_AUDIENCE) # type: ignore
+ )
+
+ def _validate_instrumentation_key(self) -> None:
+ """Validates the instrumentation key used for Azure Monitor.
+
+ An instrumentation key cannot be null or empty. An instrumentation key
+ is valid for Azure Monitor only if it is a valid UUID.
+ """
+ if not self.instrumentation_key:
+ raise ValueError("Instrumentation key cannot be none or empty.")
+ match = uuid_regex_pattern.match(self.instrumentation_key)
+ if not match:
+ raise ValueError("Invalid instrumentation key. It should be a valid UUID.")
+
+ def _parse_connection_string(self, connection_string) -> typing.Dict:
+ if connection_string is None:
+ return {}
+ try:
+ pairs = connection_string.split(";")
+ result = dict(s.split("=") for s in pairs)
+ # Convert keys to lower-case for case-insensitive matching
+ result = {key.lower(): value for key, value in result.items()}
+ except Exception as exc:
+ raise ValueError("Invalid connection string") from exc
+ # Validate authorization
+ auth = result.get("authorization")
+ if auth is not None and auth.lower() != "ikey":
+ raise ValueError("Invalid authorization mechanism")
+
+ # Construct the endpoints if not passed in explicitly
+ endpoint_suffix = ""
+ location_prefix = ""
+ suffix = result.get("endpointsuffix")
+ if suffix is not None:
+ endpoint_suffix = suffix
+ # Get regional information if provided
+ prefix = result.get("location")
+ if prefix is not None:
+ location_prefix = prefix + "."
+ # Construct the endpoints if not passed in explicitly
+ if result.get(INGESTION_ENDPOINT) is None:
+ if endpoint_suffix:
+ result[INGESTION_ENDPOINT] = "https://{0}dc.{1}".format(location_prefix, endpoint_suffix)
+ else:
+ # Default to None if cannot construct
+ result[INGESTION_ENDPOINT] = None
+ if result.get(LIVE_ENDPOINT) is None:
+ if endpoint_suffix:
+ result[LIVE_ENDPOINT] = "https://{0}live.{1}".format(location_prefix, endpoint_suffix)
+ else:
+ result[LIVE_ENDPOINT] = None
+
+ return result
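
A hedged illustration of the rules above (the parser is internal to the package, and the key is a placeholder): keys are matched case-insensitively, and when no explicit IngestionEndpoint/LiveEndpoint is supplied, EndpointSuffix and Location are combined into one.

    from azure.monitor.opentelemetry.exporter._connection_string_parser import (
        ConnectionStringParser,
    )

    parser = ConnectionStringParser(
        "InstrumentationKey=00000000-0000-0000-0000-000000000000;"
        "EndpointSuffix=applicationinsights.azure.cn;Location=chinaeast2"
    )
    # Endpoints are constructed as https://{location}.{dc|live}.{suffix}
    assert parser.endpoint == "https://chinaeast2.dc.applicationinsights.azure.cn"
    assert parser.live_endpoint == "https://chinaeast2.live.applicationinsights.azure.cn"
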
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_constants.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_constants.py
new file mode 100644
index 00000000..a82881e2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_constants.py
@@ -0,0 +1,216 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# cSpell:disable
+
+from opentelemetry.semconv.metrics import MetricInstruments
+from opentelemetry.semconv.metrics.http_metrics import (
+ HTTP_CLIENT_REQUEST_DURATION,
+ HTTP_SERVER_REQUEST_DURATION,
+)
+
+# Environment variables
+
+_APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL = "APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"
+_APPLICATIONINSIGHTS_OPENTELEMETRY_RESOURCE_METRIC_DISABLED = (
+ "APPLICATIONINSIGHTS_OPENTELEMETRY_RESOURCE_METRIC_DISABLED"
+)
+_APPLICATIONINSIGHTS_METRIC_NAMESPACE_OPT_IN = "APPLICATIONINSIGHTS_METRIC_NAMESPACE_OPT_IN"
+
+# RPs
+
+_WEBSITE_SITE_NAME = "WEBSITE_SITE_NAME"
+_WEBSITE_HOME_STAMPNAME = "WEBSITE_HOME_STAMPNAME"
+_WEBSITE_HOSTNAME = "WEBSITE_HOSTNAME"
+_FUNCTIONS_WORKER_RUNTIME = "FUNCTIONS_WORKER_RUNTIME"
+_PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY = "PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY"
+_AKS_ARM_NAMESPACE_ID = "AKS_ARM_NAMESPACE_ID"
+
+# Network
+
+_INVALID_STATUS_CODES = (400,) # Invalid Instrumentation Key/data
+
+_REDIRECT_STATUS_CODES = (
+ 307, # Temporary redirect
+ 308, # Permanent redirect
+)
+
+_RETRYABLE_STATUS_CODES = (
+ 401, # Unauthorized
+ 403, # Forbidden
+ 408, # Request Timeout
+ 429, # Too Many Requests - retry after
+ 500, # Internal Server Error
+ 502, # BadGateway
+ 503, # Service Unavailable
+ 504, # Gateway timeout
+)
+
+_THROTTLE_STATUS_CODES = (
+ 402, # Quota, too Many Requests over extended time
+ 439, # Quota, too Many Requests over extended time (legacy)
+)
+
+_REACHED_INGESTION_STATUS_CODES = (200, 206, 402, 408, 429, 439, 500)
+
+# Envelope constants
+
+_METRIC_ENVELOPE_NAME = "Microsoft.ApplicationInsights.Metric"
+_EXCEPTION_ENVELOPE_NAME = "Microsoft.ApplicationInsights.Exception"
+_MESSAGE_ENVELOPE_NAME = "Microsoft.ApplicationInsights.Message"
+_REQUEST_ENVELOPE_NAME = "Microsoft.ApplicationInsights.Request"
+_REMOTE_DEPENDENCY_ENVELOPE_NAME = "Microsoft.ApplicationInsights.RemoteDependency"
+
+# Feature constants
+_APPLICATION_INSIGHTS_EVENT_MARKER_ATTRIBUTE = "APPLICATION_INSIGHTS_EVENT_MARKER_ATTRIBUTE"
+_AZURE_MONITOR_DISTRO_VERSION_ARG = "distro_version"
+_MICROSOFT_CUSTOM_EVENT_NAME = "microsoft.custom_event.name"
+
+# Statsbeat
+
+# (OpenTelemetry metric name, Statsbeat metric name)
+_ATTACH_METRIC_NAME = ("attach", "Attach")
+_FEATURE_METRIC_NAME = ("feature", "Feature")
+_REQ_EXCEPTION_NAME = ("statsbeat_exception_count", "Exception_Count")
+_REQ_DURATION_NAME = ("statsbeat_duration", "Request_Duration")
+_REQ_FAILURE_NAME = ("statsbeat_failure_count", "Request_Failure_Count")
+_REQ_RETRY_NAME = ("statsbeat_retry_count", "Retry_Count")
+_REQ_SUCCESS_NAME = ("statsbeat_success_count", "Request_Success_Count")
+_REQ_THROTTLE_NAME = ("statsbeat_throttle_count", "Throttle_Count")
+
+_STATSBEAT_METRIC_NAME_MAPPINGS = dict(
+ [
+ _ATTACH_METRIC_NAME,
+ _FEATURE_METRIC_NAME,
+ _REQ_DURATION_NAME,
+ _REQ_EXCEPTION_NAME,
+ _REQ_FAILURE_NAME,
+ _REQ_SUCCESS_NAME,
+ _REQ_RETRY_NAME,
+ _REQ_THROTTLE_NAME,
+ ]
+)
+_APPLICATIONINSIGHTS_STATS_CONNECTION_STRING_ENV_NAME = "APPLICATIONINSIGHTS_STATS_CONNECTION_STRING"
+_APPLICATIONINSIGHTS_STATS_SHORT_EXPORT_INTERVAL_ENV_NAME = "APPLICATIONINSIGHTS_STATS_SHORT_EXPORT_INTERVAL"
+_APPLICATIONINSIGHTS_STATS_LONG_EXPORT_INTERVAL_ENV_NAME = "APPLICATIONINSIGHTS_STATS_LONG_EXPORT_INTERVAL"
+# pylint: disable=line-too-long
+_DEFAULT_NON_EU_STATS_CONNECTION_STRING = "InstrumentationKey=c4a29126-a7cb-47e5-b348-11414998b11e;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/"
+_DEFAULT_EU_STATS_CONNECTION_STRING = "InstrumentationKey=7dc56bab-3c0c-4e9f-9ebb-d1acadee8d0f;IngestionEndpoint=https://westeurope-5.in.applicationinsights.azure.com/"
+_DEFAULT_STATS_SHORT_EXPORT_INTERVAL = 900 # 15 minutes
+_DEFAULT_STATS_LONG_EXPORT_INTERVAL = 86400 # 24 hours
+_EU_ENDPOINTS = [
+ "westeurope",
+ "northeurope",
+ "francecentral",
+ "francesouth",
+ "germanywestcentral",
+ "norwayeast",
+ "norwaywest",
+ "swedencentral",
+ "switzerlandnorth",
+ "switzerlandwest",
+ "uksouth",
+ "ukwest",
+]
+
+# Instrumentations
+
+# Special constant for azure-sdk opentelemetry instrumentation
+_AZURE_SDK_OPENTELEMETRY_NAME = "azure-sdk-opentelemetry"
+_AZURE_SDK_NAMESPACE_NAME = "az.namespace"
+
+_BASE = 2
+
+_INSTRUMENTATIONS_LIST = [
+ "django",
+ "flask",
+ "google_cloud",
+ "http_lib",
+ "logging",
+ "mysql",
+ "psycopg2",
+ "pymongo",
+ "pymysql",
+ "pyramid",
+ "requests",
+ "sqlalchemy",
+ "aio-pika",
+ "aiohttp-client",
+ "aiopg",
+ "asgi",
+ "asyncpg",
+ "celery",
+ "confluent-kafka",
+ "dbapi",
+ "elasticsearch",
+ "falcon",
+ "fastapi",
+ "grpc",
+ "httpx",
+ "jinja2",
+ "kafka-python",
+ "pika",
+ "pymemcache",
+ "redis",
+ "remoulade",
+ "sklearn",
+ "sqlite3",
+ "starlette",
+ "system-metrics",
+ "tornado",
+ "urllib",
+ "urllib3",
+ _AZURE_SDK_OPENTELEMETRY_NAME,
+ # Instrumentations below this line have not been added to statsbeat report yet
+ "cassandra",
+ "tortoiseorm",
+ "aiohttp-server",
+ "asyncio",
+ "mysqlclient",
+ "psycopg",
+ "threading",
+ "wsgi",
+]
+
+_INSTRUMENTATIONS_BIT_MAP = {_INSTRUMENTATIONS_LIST[i]: _BASE**i for i in range(len(_INSTRUMENTATIONS_LIST))}
+
+# Standard metrics
+
+# List of metric instrument names that are autocollected from instrumentations
+_AUTOCOLLECTED_INSTRUMENT_NAMES = (
+ HTTP_CLIENT_REQUEST_DURATION,
+ HTTP_SERVER_REQUEST_DURATION,
+ MetricInstruments.HTTP_SERVER_DURATION,
+ MetricInstruments.HTTP_SERVER_REQUEST_SIZE,
+ MetricInstruments.HTTP_SERVER_RESPONSE_SIZE,
+ MetricInstruments.HTTP_SERVER_ACTIVE_REQUESTS,
+ MetricInstruments.HTTP_CLIENT_DURATION,
+ MetricInstruments.HTTP_CLIENT_REQUEST_SIZE,
+ MetricInstruments.HTTP_CLIENT_RESPONSE_SIZE,
+)
+
+# Temporary solution for checking which instrumentations support metric collection
+_INSTRUMENTATION_SUPPORTING_METRICS_LIST = (
+ "opentelemetry.instrumentation.asgi",
+ "opentelemetry.instrumentation.django",
+ "opentelemetry.instrumentation.falcon",
+ "opentelemetry.instrumentation.fastapi",
+ "opentelemetry.instrumentation.flask",
+ "opentelemetry.instrumentation.pyramid",
+ "opentelemetry.instrumentation.requests",
+ "opentelemetry-instrumentation-sqlalchemy",
+ "opentelemetry.instrumentation.starlette",
+ "opentelemetry-instrumentation-tornado",
+ "opentelemetry-instrumentation-urllib",
+ "opentelemetry.instrumentation.urllib3",
+ "opentelemetry.instrumentation.wsgi",
+)
+
+# sampleRate
+
+_SAMPLE_RATE_KEY = "_MS.sampleRate"
+
+# AAD Auth
+
+_DEFAULT_AAD_SCOPE = "https://monitor.azure.com//.default"
+
+# cSpell:enable
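
One detail worth spelling out: _INSTRUMENTATIONS_BIT_MAP assigns each instrumentation a distinct power of two, so a set of detected instrumentations packs into a single integer. A small sketch (the detection result below is hypothetical; the statsbeat code later in this diff uses the map for its feature reporting):

    from azure.monitor.opentelemetry.exporter._constants import _INSTRUMENTATIONS_BIT_MAP

    feature_bits = 0
    for name in ("django", "requests", "redis"):  # hypothetical detection result
        feature_bits |= _INSTRUMENTATIONS_BIT_MAP[name]

    # Individual instrumentations can be tested back out with a mask.
    assert feature_bits & _INSTRUMENTATIONS_BIT_MAP["requests"]
    assert not feature_bits & _INSTRUMENTATIONS_BIT_MAP["flask"]
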
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/__init__.py
new file mode 100644
index 00000000..6d010691
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/__init__.py
@@ -0,0 +1,17 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_monitor_client import AzureMonitorClient
+
+__all__ = ["AzureMonitorClient"]
+
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+
+patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_azure_monitor_client.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_azure_monitor_client.py
new file mode 100644
index 00000000..6ea083bb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_azure_monitor_client.py
@@ -0,0 +1,92 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import TYPE_CHECKING
+
+from msrest import Deserializer, Serializer
+
+from azure.core import PipelineClient
+
+from . import models
+from ._configuration import AzureMonitorClientConfiguration
+from .operations import AzureMonitorClientOperationsMixin
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.rest import HttpRequest, HttpResponse
+
+
+class AzureMonitorClient(AzureMonitorClientOperationsMixin):
+ """OpenTelemetry Exporter for Azure Monitor.
+
+ :param host: Breeze endpoint: https://dc.services.visualstudio.com. Default value is
+ "https://dc.services.visualstudio.com".
+ :type host: str
+ """
+
+ def __init__(
+ self,
+ host="https://dc.services.visualstudio.com", # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ _base_url = "{Host}/v2.1"
+ self._config = AzureMonitorClientConfiguration(host=host, **kwargs)
+ self._client = PipelineClient(base_url=_base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+ self._serialize.client_side_validation = False
+
+ def _send_request(
+ self,
+ request, # type: HttpRequest
+ **kwargs # type: Any
+ ):
+ # type: (...) -> HttpResponse
+ """Runs the network request through the client's chained policies.
+
+ >>> from azure.core.rest import HttpRequest
+ >>> request = HttpRequest("GET", "https://www.example.org/")
+ <HttpRequest [GET], url: 'https://www.example.org/'>
+ >>> response = client._send_request(request)
+ <HttpResponse: 200 OK>
+
+ For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+
+ :param request: The network request you want to make. Required.
+ :type request: ~azure.core.rest.HttpRequest
+ :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+ :return: The response of your network call. Does not do error handling on your response.
+ :rtype: ~azure.core.rest.HttpResponse
+ """
+
+ request_copy = deepcopy(request)
+ path_format_arguments = {
+ "Host": self._serialize.url("self._config.host", self._config.host, "str", skip_quote=True),
+ }
+
+ request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
+ return self._client.send_request(request_copy, **kwargs)
+
+ def close(self):
+ # type: () -> None
+ self._client.close()
+
+ def __enter__(self):
+ # type: () -> AzureMonitorClient
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *exc_details):
+ # type: (Any) -> None
+ self._client.__exit__(*exc_details)
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_configuration.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_configuration.py
new file mode 100644
index 00000000..8fa05dbe
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_configuration.py
@@ -0,0 +1,58 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+VERSION = "unknown"
+
+
+class AzureMonitorClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+ """Configuration for AzureMonitorClient.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param host: Breeze endpoint: https://dc.services.visualstudio.com. Default value is
+ "https://dc.services.visualstudio.com".
+ :type host: str
+ """
+
+ def __init__(
+ self,
+ host="https://dc.services.visualstudio.com", # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ super(AzureMonitorClientConfiguration, self).__init__(**kwargs)
+ if host is None:
+ raise ValueError("Parameter 'host' must not be None.")
+
+ self.host = host
+ kwargs.setdefault("sdk_moniker", "azuremonitorclient/{}".format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self, **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get("authentication_policy")
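
_configure resolves every pipeline policy from kwargs before falling back to an azure-core default, so callers can swap any policy at construction time. A hedged sketch using standard azure-core RetryPolicy options:

    from azure.core.pipeline import policies

    from azure.monitor.opentelemetry.exporter._generated import AzureMonitorClient

    # Tighter retries than the default; all other policies keep the
    # defaults assigned in _configure above.
    client = AzureMonitorClient(
        retry_policy=policies.RetryPolicy(retry_total=3, retry_backoff_factor=0.8),
    )
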
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_patch.py
new file mode 100644
index 00000000..17dbc073
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_patch.py
@@ -0,0 +1,32 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+
+# This file is used for handwritten extensions to the generated code. Example:
+# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+def patch_sdk():
+ pass
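
A hypothetical sketch of what a handwritten extension could look like in this file; the subclass and the module swap are illustrative only, not part of the package:

    # Inside _patch.py (hypothetical customization):
    import sys

    from ._azure_monitor_client import AzureMonitorClient

    class _PatchedAzureMonitorClient(AzureMonitorClient):  # hypothetical
        pass  # override or extend generated behavior here

    def patch_sdk():
        # patch_sdk() runs at import time (see __init__.py above), so
        # module-level replacements land here.
        sys.modules[__package__].AzureMonitorClient = _PatchedAzureMonitorClient
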
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_vendor.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_vendor.py
new file mode 100644
index 00000000..0dafe0e2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/_vendor.py
@@ -0,0 +1,16 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.pipeline.transport import HttpRequest
+
+
+def _convert_request(request, files=None):
+ data = request.content if not files else None
+ request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
+ if files:
+ request.set_formdata_body(files)
+ return request
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/__init__.py
new file mode 100644
index 00000000..6d010691
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/__init__.py
@@ -0,0 +1,17 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_monitor_client import AzureMonitorClient
+
+__all__ = ["AzureMonitorClient"]
+
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+
+patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_azure_monitor_client.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_azure_monitor_client.py
new file mode 100644
index 00000000..e829f0d4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_azure_monitor_client.py
@@ -0,0 +1,74 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable
+
+from msrest import Deserializer, Serializer
+
+from azure.core import AsyncPipelineClient
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+
+from .. import models
+from ._configuration import AzureMonitorClientConfiguration
+from .operations import AzureMonitorClientOperationsMixin
+
+
+class AzureMonitorClient(AzureMonitorClientOperationsMixin):
+ """OpenTelemetry Exporter for Azure Monitor.
+
+ :param host: Breeze endpoint: https://dc.services.visualstudio.com. Default value is
+ "https://dc.services.visualstudio.com".
+ :type host: str
+ """
+
+ def __init__(self, host: str = "https://dc.services.visualstudio.com", **kwargs: Any) -> None:
+ _base_url = "{Host}/v2.1"
+ self._config = AzureMonitorClientConfiguration(host=host, **kwargs)
+ self._client = AsyncPipelineClient(base_url=_base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+ self._serialize.client_side_validation = False
+
+ def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
+ """Runs the network request through the client's chained policies.
+
+ >>> from azure.core.rest import HttpRequest
+ >>> request = HttpRequest("GET", "https://www.example.org/")
+ <HttpRequest [GET], url: 'https://www.example.org/'>
+ >>> response = await client._send_request(request)
+ <AsyncHttpResponse: 200 OK>
+
+ For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+
+ :param request: The network request you want to make. Required.
+ :type request: ~azure.core.rest.HttpRequest
+ :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+ :return: The response of your network call. Does not do error handling on your response.
+ :rtype: ~azure.core.rest.AsyncHttpResponse
+ """
+
+ request_copy = deepcopy(request)
+ path_format_arguments = {
+ "Host": self._serialize.url("self._config.host", self._config.host, "str", skip_quote=True),
+ }
+
+ request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
+ return self._client.send_request(request_copy, **kwargs)
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> "AzureMonitorClient":
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details) -> None:
+ await self._client.__aexit__(*exc_details)
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_configuration.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_configuration.py
new file mode 100644
index 00000000..1bc76b30
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_configuration.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+VERSION = "unknown"
+
+
+class AzureMonitorClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+ """Configuration for AzureMonitorClient.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param host: Breeze endpoint: https://dc.services.visualstudio.com. Default value is
+ "https://dc.services.visualstudio.com".
+ :type host: str
+ """
+
+ def __init__(self, host: str = "https://dc.services.visualstudio.com", **kwargs: Any) -> None:
+ super(AzureMonitorClientConfiguration, self).__init__(**kwargs)
+ if host is None:
+ raise ValueError("Parameter 'host' must not be None.")
+
+ self.host = host
+ kwargs.setdefault("sdk_moniker", "azuremonitorclient/{}".format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(self, **kwargs: Any) -> None:
+ self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get("authentication_policy")
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_patch.py
new file mode 100644
index 00000000..17dbc073
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/_patch.py
@@ -0,0 +1,32 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+
+# This file is used for handwritten extensions to the generated code. Example:
+# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+def patch_sdk():
+ pass
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/operations/__init__.py
new file mode 100644
index 00000000..a1b6ce78
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/operations/__init__.py
@@ -0,0 +1,13 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_monitor_client_operations import AzureMonitorClientOperationsMixin
+
+__all__ = [
+ "AzureMonitorClientOperationsMixin",
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/operations/_azure_monitor_client_operations.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/operations/_azure_monitor_client_operations.py
new file mode 100644
index 00000000..72c70dab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/aio/operations/_azure_monitor_client_operations.py
@@ -0,0 +1,102 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, List, Optional, TypeVar
+
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._azure_monitor_client_operations import build_track_request
+
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class AzureMonitorClientOperationsMixin:
+
+ async def track(self, body: List["_models.TelemetryItem"], **kwargs: Any) -> "_models.TrackResponse":
+ """Track telemetry events.
+
+ This operation sends a sequence of telemetry events that will be monitored by Azure Monitor.
+
+ :param body: The list of telemetry events to track.
+ :type body: list[~azure_monitor_client.models.TelemetryItem]
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TrackResponse, or the result of cls(response)
+ :rtype: ~azure_monitor_client.models.TrackResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop("cls", None) # type: ClsType["_models.TrackResponse"]
+ error_map = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 400: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ 402: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ 429: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ 500: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ 503: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ }
+ error_map.update(kwargs.pop("error_map", {}))
+
+ content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
+
+ _json = self._serialize.body(body, "[TelemetryItem]")
+
+ request = build_track_request(
+ content_type=content_type,
+ json=_json,
+ template_url=self.track.metadata["url"],
+ )
+ request = _convert_request(request)
+ path_format_arguments = {
+ "Host": self._serialize.url("self._config.host", self._config.host, "str", skip_quote=True),
+ }
+ request.url = self._client.format_url(request.url, **path_format_arguments)
+
+ pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
+ request, stream=False, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 206]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize("TrackResponse", pipeline_response)
+
+ if response.status_code == 206:
+ deserialized = self._deserialize("TrackResponse", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+
+ track.metadata = {"url": "/track"} # type: ignore
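
Since track is the client's only operation, a hedged end-to-end sketch of calling it directly (the envelope is deliberately minimal; in practice the exporters in this package build full TelemetryItem envelopes, including the resource's instrumentation key, and route them through export/_base.py):

    import asyncio
    import datetime

    from azure.monitor.opentelemetry.exporter._generated import models
    from azure.monitor.opentelemetry.exporter._generated.aio import AzureMonitorClient

    async def main():
        async with AzureMonitorClient() as client:
            item = models.TelemetryItem(
                name="Microsoft.ApplicationInsights.Message",
                time=datetime.datetime.now(datetime.timezone.utc),
                # A real envelope also carries instrumentation_key, tags, and data;
                # without them the service rejects the item (400, see error_map).
            )
            response = await client.track([item])
            print(response.items_received, response.items_accepted)

    asyncio.run(main())
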
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/__init__.py
new file mode 100644
index 00000000..2f29230e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/__init__.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+ from ._models_py3 import AvailabilityData
+ from ._models_py3 import MessageData
+ from ._models_py3 import MetricDataPoint
+ from ._models_py3 import MetricsData
+ from ._models_py3 import MonitorBase
+ from ._models_py3 import MonitorDomain
+ from ._models_py3 import PageViewData
+ from ._models_py3 import PageViewPerfData
+ from ._models_py3 import RemoteDependencyData
+ from ._models_py3 import RequestData
+ from ._models_py3 import StackFrame
+ from ._models_py3 import TelemetryErrorDetails
+ from ._models_py3 import TelemetryEventData
+ from ._models_py3 import TelemetryExceptionData
+ from ._models_py3 import TelemetryExceptionDetails
+ from ._models_py3 import TelemetryItem
+ from ._models_py3 import TrackResponse
+except (SyntaxError, ImportError):
+ from ._models import AvailabilityData # type: ignore
+ from ._models import MessageData # type: ignore
+ from ._models import MetricDataPoint # type: ignore
+ from ._models import MetricsData # type: ignore
+ from ._models import MonitorBase # type: ignore
+ from ._models import MonitorDomain # type: ignore
+ from ._models import PageViewData # type: ignore
+ from ._models import PageViewPerfData # type: ignore
+ from ._models import RemoteDependencyData # type: ignore
+ from ._models import RequestData # type: ignore
+ from ._models import StackFrame # type: ignore
+ from ._models import TelemetryErrorDetails # type: ignore
+ from ._models import TelemetryEventData # type: ignore
+ from ._models import TelemetryExceptionData # type: ignore
+ from ._models import TelemetryExceptionDetails # type: ignore
+ from ._models import TelemetryItem # type: ignore
+ from ._models import TrackResponse # type: ignore
+
+from ._azure_monitor_client_enums import (
+ ContextTagKeys,
+ DataPointType,
+ SeverityLevel,
+)
+
+__all__ = [
+ "AvailabilityData",
+ "MessageData",
+ "MetricDataPoint",
+ "MetricsData",
+ "MonitorBase",
+ "MonitorDomain",
+ "PageViewData",
+ "PageViewPerfData",
+ "RemoteDependencyData",
+ "RequestData",
+ "StackFrame",
+ "TelemetryErrorDetails",
+ "TelemetryEventData",
+ "TelemetryExceptionData",
+ "TelemetryExceptionDetails",
+ "TelemetryItem",
+ "TrackResponse",
+ "ContextTagKeys",
+ "DataPointType",
+ "SeverityLevel",
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_azure_monitor_client_enums.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_azure_monitor_client_enums.py
new file mode 100644
index 00000000..cb26c587
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_azure_monitor_client_enums.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class ContextTagKeys(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The context tag keys."""
+
+ AI_APPLICATION_VER = "ai.application.ver"
+ AI_DEVICE_ID = "ai.device.id"
+ AI_DEVICE_LOCALE = "ai.device.locale"
+ AI_DEVICE_MODEL = "ai.device.model"
+ AI_DEVICE_OEM_NAME = "ai.device.oemName"
+ AI_DEVICE_OS_VERSION = "ai.device.osVersion"
+ AI_DEVICE_TYPE = "ai.device.type"
+ AI_LOCATION_IP = "ai.location.ip"
+ AI_LOCATION_COUNTRY = "ai.location.country"
+ AI_LOCATION_PROVINCE = "ai.location.province"
+ AI_LOCATION_CITY = "ai.location.city"
+ AI_OPERATION_ID = "ai.operation.id"
+ AI_OPERATION_NAME = "ai.operation.name"
+ AI_OPERATION_PARENT_ID = "ai.operation.parentId"
+ AI_OPERATION_SYNTHETIC_SOURCE = "ai.operation.syntheticSource"
+ AI_OPERATION_CORRELATION_VECTOR = "ai.operation.correlationVector"
+ AI_SESSION_ID = "ai.session.id"
+ AI_SESSION_IS_FIRST = "ai.session.isFirst"
+ AI_USER_ACCOUNT_ID = "ai.user.accountId"
+ AI_USER_ID = "ai.user.id"
+ AI_USER_AUTH_USER_ID = "ai.user.authUserId"
+ AI_CLOUD_ROLE = "ai.cloud.role"
+ AI_CLOUD_ROLE_VER = "ai.cloud.roleVer"
+ AI_CLOUD_ROLE_INSTANCE = "ai.cloud.roleInstance"
+ AI_CLOUD_LOCATION = "ai.cloud.location"
+ AI_INTERNAL_SDK_VERSION = "ai.internal.sdkVersion"
+ AI_INTERNAL_AGENT_VERSION = "ai.internal.agentVersion"
+ AI_INTERNAL_NODE_NAME = "ai.internal.nodeName"
+
+
+class DataPointType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of the metric data measurement."""
+
+ MEASUREMENT = "Measurement"
+ AGGREGATION = "Aggregation"
+
+
+class SeverityLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Defines the level of severity for the event."""
+
+ VERBOSE = "Verbose"
+ INFORMATION = "Information"
+ WARNING = "Warning"
+ ERROR = "Error"
+ CRITICAL = "Critical"
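
Because these enums use azure.core's CaseInsensitiveEnumMeta, member lookup by name ignores case, and as str subclasses the members compare equal to their wire values. A small sketch:

    from azure.monitor.opentelemetry.exporter._generated.models import SeverityLevel

    assert SeverityLevel["warning"] is SeverityLevel.WARNING  # name lookup, any case
    assert SeverityLevel.WARNING == "Warning"                 # str-valued enum
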
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_models.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_models.py
new file mode 100644
index 00000000..ceaf6119
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_models.py
@@ -0,0 +1,1167 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class MonitorDomain(msrest.serialization.Model):
+ """The abstract common base of all domains.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ """
+
+ _validation = {
+ "version": {"required": True},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ """
+ super(MonitorDomain, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get("additional_properties", None)
+ self.version = kwargs.get("version", 2)
+
+
+class AvailabilityData(MonitorDomain):
+ """Instances of AvailabilityData represent the result of executing an availability test.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Required. Identifier of a test run. Use it to correlate steps of test run and
+ telemetry generated by the service.
+ :vartype id: str
+ :ivar name: Required. Name of the test that these availability results represent.
+ :vartype name: str
+ :ivar duration: Required. Duration in format: DD.HH:MM:SS.MMMMMM. Must be less than 1000 days.
+ :vartype duration: str
+ :ivar success: Required. Success flag.
+ :vartype success: bool
+ :ivar run_location: Name of the location where the test was run from.
+ :vartype run_location: str
+ :ivar message: Diagnostic message for the result.
+ :vartype message: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"required": True, "max_length": 512, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "duration": {"required": True},
+ "success": {"required": True},
+ "run_location": {"max_length": 1024, "min_length": 0},
+ "message": {"max_length": 8192, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "success": {"key": "success", "type": "bool"},
+ "run_location": {"key": "runLocation", "type": "str"},
+ "message": {"key": "message", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Required. Identifier of a test run. Use it to correlate steps of test run and
+ telemetry generated by the service.
+ :paramtype id: str
+ :keyword name: Required. Name of the test that these availability results represent.
+ :paramtype name: str
+ :keyword duration: Required. Duration in format: DD.HH:MM:SS.MMMMMM. Must be less than 1000
+ days.
+ :paramtype duration: str
+ :keyword success: Required. Success flag.
+ :paramtype success: bool
+ :keyword run_location: Name of the location where the test was run from.
+ :paramtype run_location: str
+ :keyword message: Diagnostic message for the result.
+ :paramtype message: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(AvailabilityData, self).__init__(**kwargs)
+ self.id = kwargs["id"]
+ self.name = kwargs["name"]
+ self.duration = kwargs["duration"]
+ self.success = kwargs["success"]
+ self.run_location = kwargs.get("run_location", None)
+ self.message = kwargs.get("message", None)
+ self.properties = kwargs.get("properties", None)
+ self.measurements = kwargs.get("measurements", None)
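+
+# Illustrative sketch (hypothetical values): "id", "name", "duration" and
+# "success" are required; omitting any of them raises KeyError in __init__:
+#
+#     availability = AvailabilityData(
+#         id="test-run-42",
+#         name="homepage-availability-test",
+#         duration="00.00:00:01.250000",
+#         success=True,
+#         run_location="westus",
+#     )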
+
+
+class MessageData(MonitorDomain):
+ """Instances of Message represent printf-like trace statements that are text-searched. Log4Net, NLog and other text-based log file entries are translated into instances of this type. The message does not have measurements.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar message: Required. Trace message.
+ :vartype message: str
+ :ivar severity_level: Trace severity level. Possible values include: "Verbose", "Information",
+ "Warning", "Error", "Critical".
+ :vartype severity_level: str or ~azure_monitor_client.models.SeverityLevel
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "message": {"required": True, "max_length": 32768, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "message": {"key": "message", "type": "str"},
+ "severity_level": {"key": "severityLevel", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword message: Required. Trace message.
+ :paramtype message: str
+ :keyword severity_level: Trace severity level. Possible values include: "Verbose",
+ "Information", "Warning", "Error", "Critical".
+ :paramtype severity_level: str or ~azure_monitor_client.models.SeverityLevel
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(MessageData, self).__init__(**kwargs)
+ self.message = kwargs["message"]
+ self.severity_level = kwargs.get("severity_level", None)
+ self.properties = kwargs.get("properties", None)
+ self.measurements = kwargs.get("measurements", None)
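+
+# Illustrative sketch (hypothetical values): only "message" is required, and
+# severity_level accepts a plain string matching the SeverityLevel enum values:
+#
+#     trace = MessageData(message="cache warmed", severity_level="Information")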
+
+
+class MetricDataPoint(msrest.serialization.Model):
+ """Metric data single measurement.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar namespace: Namespace of the metric.
+ :vartype namespace: str
+ :ivar name: Required. Name of the metric.
+ :vartype name: str
+ :ivar data_point_type: Metric type. Single measurement or the aggregated value. Possible values
+ include: "Measurement", "Aggregation".
+ :vartype data_point_type: str or ~azure_monitor_client.models.DataPointType
+ :ivar value: Required. Single value for measurement. Sum of individual measurements for the
+ aggregation.
+ :vartype value: float
+ :ivar count: Metric weight of the aggregated metric. Should not be set for a measurement.
+ :vartype count: int
+ :ivar min: Minimum value of the aggregated metric. Should not be set for a measurement.
+ :vartype min: float
+ :ivar max: Maximum value of the aggregated metric. Should not be set for a measurement.
+ :vartype max: float
+ :ivar std_dev: Standard deviation of the aggregated metric. Should not be set for a
+ measurement.
+ :vartype std_dev: float
+ """
+
+ _validation = {
+ "namespace": {"max_length": 256, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "value": {"required": True},
+ }
+
+ _attribute_map = {
+ "namespace": {"key": "ns", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "data_point_type": {"key": "kind", "type": "str"},
+ "value": {"key": "value", "type": "float"},
+ "count": {"key": "count", "type": "int"},
+ "min": {"key": "min", "type": "float"},
+ "max": {"key": "max", "type": "float"},
+ "std_dev": {"key": "stdDev", "type": "float"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword namespace: Namespace of the metric.
+ :paramtype namespace: str
+ :keyword name: Required. Name of the metric.
+ :paramtype name: str
+ :keyword data_point_type: Metric type. Single measurement or the aggregated value. Possible
+ values include: "Measurement", "Aggregation".
+ :paramtype data_point_type: str or ~azure_monitor_client.models.DataPointType
+ :keyword value: Required. Single value for measurement. Sum of individual measurements for the
+ aggregation.
+ :paramtype value: float
+ :keyword count: Metric weight of the aggregated metric. Should not be set for a measurement.
+ :paramtype count: int
+ :keyword min: Minimum value of the aggregated metric. Should not be set for a measurement.
+ :paramtype min: float
+ :keyword max: Maximum value of the aggregated metric. Should not be set for a measurement.
+ :paramtype max: float
+ :keyword std_dev: Standard deviation of the aggregated metric. Should not be set for a
+ measurement.
+ :paramtype std_dev: float
+ """
+ super(MetricDataPoint, self).__init__(**kwargs)
+ self.namespace = kwargs.get("namespace", None)
+ self.name = kwargs["name"]
+ self.data_point_type = kwargs.get("data_point_type", None)
+ self.value = kwargs["value"]
+ self.count = kwargs.get("count", None)
+ self.min = kwargs.get("min", None)
+ self.max = kwargs.get("max", None)
+ self.std_dev = kwargs.get("std_dev", None)
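+
+# Illustrative sketch (hypothetical values): a single measurement sets only
+# "name" and "value"; count/min/max/std_dev are reserved for aggregated points:
+#
+#     point = MetricDataPoint(name="queue_depth", value=12.0)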
+
+
+class MetricsData(MonitorDomain):
+ """An instance of the Metric item is a list of measurements (single data points) and/or aggregations.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar metrics: Required. List of metrics. Only one metric in the list is currently supported by
+ Application Insights storage. If multiple data points were sent only the first one will be
+ used.
+ :vartype metrics: list[~azure_monitor_client.models.MetricDataPoint]
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "metrics": {"required": True},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "metrics": {"key": "metrics", "type": "[MetricDataPoint]"},
+ "properties": {"key": "properties", "type": "{str}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword metrics: Required. List of metrics. Only one metric in the list is currently supported
+ by Application Insights storage. If multiple data points were sent only the first one will be
+ used.
+ :paramtype metrics: list[~azure_monitor_client.models.MetricDataPoint]
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ """
+ super(MetricsData, self).__init__(**kwargs)
+ self.metrics = kwargs["metrics"]
+ self.properties = kwargs.get("properties", None)
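+
+# Illustrative sketch (hypothetical values): per the docstring above, only the
+# first MetricDataPoint in the list is used by Application Insights storage:
+#
+#     metrics = MetricsData(metrics=[MetricDataPoint(name="queue_depth", value=12.0)])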
+
+
+class MonitorBase(msrest.serialization.Model):
+ """Data struct to contain only C section with custom fields.
+
+ :ivar base_type: Name of item (B section) if any. If telemetry data is derived straight from
+ this, this should be null.
+ :vartype base_type: str
+ :ivar base_data: The data payload for the telemetry request.
+ :vartype base_data: ~azure_monitor_client.models.MonitorDomain
+ """
+
+ _attribute_map = {
+ "base_type": {"key": "baseType", "type": "str"},
+ "base_data": {"key": "baseData", "type": "MonitorDomain"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword base_type: Name of item (B section) if any. If telemetry data is derived straight from
+ this, this should be null.
+ :paramtype base_type: str
+ :keyword base_data: The data payload for the telemetry request.
+ :paramtype base_data: ~azure_monitor_client.models.MonitorDomain
+ """
+ super(MonitorBase, self).__init__(**kwargs)
+ self.base_type = kwargs.get("base_type", None)
+ self.base_data = kwargs.get("base_data", None)
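+
+# Illustrative sketch: "base_type" names the concrete payload type carried in
+# "base_data", e.g. a MessageData payload:
+#
+#     base = MonitorBase(
+#         base_type="MessageData",
+#         base_data=MessageData(message="cache warmed"),
+#     )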
+
+
+class PageViewData(MonitorDomain):
+ """An instance of PageView represents a generic action on a page like a button click. It is also the base type for PageView.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Required. Identifier of a page view instance. Used for correlation between page view
+ and other telemetry items.
+ :vartype id: str
+ :ivar name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
+ metrics.
+ :vartype name: str
+ :ivar url: Request URL with all query string parameters.
+ :vartype url: str
+ :ivar duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view (PageViewData),
+ this is the duration. For a page view with performance information (PageViewPerfData), this is
+ the page load time. Must be less than 1000 days.
+ :vartype duration: str
+ :ivar referred_uri: Fully qualified page URI or URL of the referring page; if unknown, leave
+ blank.
+ :vartype referred_uri: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"required": True, "max_length": 512, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "url": {"max_length": 2048, "min_length": 0},
+ "referred_uri": {"max_length": 2048, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "url": {"key": "url", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "referred_uri": {"key": "referredUri", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Required. Identifier of a page view instance. Used for correlation between page
+ view and other telemetry items.
+ :paramtype id: str
+ :keyword name: Required. Event name. Keep it low cardinality to allow proper grouping and
+ useful metrics.
+ :paramtype name: str
+ :keyword url: Request URL with all query string parameters.
+ :paramtype url: str
+ :keyword duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view
+ (PageViewData), this is the duration. For a page view with performance information
+ (PageViewPerfData), this is the page load time. Must be less than 1000 days.
+ :paramtype duration: str
+ :keyword referred_uri: Fully qualified page URI or URL of the referring page; if unknown, leave
+ blank.
+ :paramtype referred_uri: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(PageViewData, self).__init__(**kwargs)
+ self.id = kwargs["id"]
+ self.name = kwargs["name"]
+ self.url = kwargs.get("url", None)
+ self.duration = kwargs.get("duration", None)
+ self.referred_uri = kwargs.get("referred_uri", None)
+ self.properties = kwargs.get("properties", None)
+ self.measurements = kwargs.get("measurements", None)
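+
+# Illustrative sketch (hypothetical values): "id" and "name" are the only
+# required kwargs; "duration" here is the page view duration:
+#
+#     page_view = PageViewData(
+#         id="pv-1",
+#         name="checkout",
+#         url="https://contoso.example/checkout",
+#     )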
+
+
+class PageViewPerfData(MonitorDomain):
+ """An instance of PageViewPerf represents: a page view with no performance data, a page view with performance data, or just the performance data of an earlier page request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Required. Identifier of a page view instance. Used for correlation between page view
+ and other telemetry items.
+ :vartype id: str
+ :ivar name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
+ metrics.
+ :vartype name: str
+ :ivar url: Request URL with all query string parameters.
+ :vartype url: str
+ :ivar duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view (PageViewData),
+ this is the duration. For a page view with performance information (PageViewPerfData), this is
+ the page load time. Must be less than 1000 days.
+ :vartype duration: str
+ :ivar perf_total: Performance total in TimeSpan 'G' (general long) format: d:hh:mm:ss.fffffff.
+ :vartype perf_total: str
+ :ivar network_connect: Network connection time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :vartype network_connect: str
+ :ivar sent_request: Sent request time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :vartype sent_request: str
+ :ivar received_response: Received response time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :vartype received_response: str
+ :ivar dom_processing: DOM processing time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :vartype dom_processing: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"required": True, "max_length": 512, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "url": {"max_length": 2048, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "url": {"key": "url", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "perf_total": {"key": "perfTotal", "type": "str"},
+ "network_connect": {"key": "networkConnect", "type": "str"},
+ "sent_request": {"key": "sentRequest", "type": "str"},
+ "received_response": {"key": "receivedResponse", "type": "str"},
+ "dom_processing": {"key": "domProcessing", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Required. Identifier of a page view instance. Used for correlation between page
+ view and other telemetry items.
+ :paramtype id: str
+ :keyword name: Required. Event name. Keep it low cardinality to allow proper grouping and
+ useful metrics.
+ :paramtype name: str
+ :keyword url: Request URL with all query string parameters.
+ :paramtype url: str
+ :keyword duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view
+ (PageViewData), this is the duration. For a page view with performance information
+ (PageViewPerfData), this is the page load time. Must be less than 1000 days.
+ :paramtype duration: str
+ :keyword perf_total: Performance total in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype perf_total: str
+ :keyword network_connect: Network connection time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype network_connect: str
+ :keyword sent_request: Sent request time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype sent_request: str
+ :keyword received_response: Received response time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype received_response: str
+ :keyword dom_processing: DOM processing time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype dom_processing: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(PageViewPerfData, self).__init__(**kwargs)
+ self.id = kwargs["id"]
+ self.name = kwargs["name"]
+ self.url = kwargs.get("url", None)
+ self.duration = kwargs.get("duration", None)
+ self.perf_total = kwargs.get("perf_total", None)
+ self.network_connect = kwargs.get("network_connect", None)
+ self.sent_request = kwargs.get("sent_request", None)
+ self.received_response = kwargs.get("received_response", None)
+ self.dom_processing = kwargs.get("dom_processing", None)
+ self.properties = kwargs.get("properties", None)
+ self.measurements = kwargs.get("measurements", None)
+
+
+class RemoteDependencyData(MonitorDomain):
+ """An instance of Remote Dependency represents an interaction of the monitored component with a remote component/service like SQL or an HTTP endpoint.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Identifier of a dependency call instance. Used for correlation with the request
+ telemetry item corresponding to this dependency call.
+ :vartype id: str
+ :ivar name: Required. Name of the command initiated with this dependency call. Low cardinality
+ value. Examples are stored procedure name and URL path template.
+ :vartype name: str
+ :ivar result_code: Result code of a dependency call. Examples are SQL error code and HTTP
+ status code.
+ :vartype result_code: str
+ :ivar data: Command initiated by this dependency call. Examples are SQL statement and HTTP URL
+ with all query parameters.
+ :vartype data: str
+ :ivar type: Dependency type name. Very low cardinality value for logical grouping of
+ dependencies and interpretation of other fields like commandName and resultCode. Examples are
+ SQL, Azure table, and HTTP.
+ :vartype type: str
+ :ivar target: Target site of a dependency call. Examples are server name, host address.
+ :vartype target: str
+ :ivar duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
+ 1000 days.
+ :vartype duration: str
+ :ivar success: Indication of successful or unsuccessful call.
+ :vartype success: bool
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"max_length": 512, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "result_code": {"max_length": 1024, "min_length": 0},
+ "data": {"max_length": 8192, "min_length": 0},
+ "type": {"max_length": 1024, "min_length": 0},
+ "target": {"max_length": 1024, "min_length": 0},
+ "duration": {"required": True},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "result_code": {"key": "resultCode", "type": "str"},
+ "data": {"key": "data", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "target": {"key": "target", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "success": {"key": "success", "type": "bool"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Identifier of a dependency call instance. Used for correlation with the request
+ telemetry item corresponding to this dependency call.
+ :paramtype id: str
+ :keyword name: Required. Name of the command initiated with this dependency call. Low
+ cardinality value. Examples are stored procedure name and URL path template.
+ :paramtype name: str
+ :keyword result_code: Result code of a dependency call. Examples are SQL error code and HTTP
+ status code.
+ :paramtype result_code: str
+ :keyword data: Command initiated by this dependency call. Examples are SQL statement and HTTP
+ URL with all query parameters.
+ :paramtype data: str
+ :keyword type: Dependency type name. Very low cardinality value for logical grouping of
+ dependencies and interpretation of other fields like commandName and resultCode. Examples are
+ SQL, Azure table, and HTTP.
+ :paramtype type: str
+ :keyword target: Target site of a dependency call. Examples are server name, host address.
+ :paramtype target: str
+ :keyword duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
+ 1000 days.
+ :paramtype duration: str
+ :keyword success: Indication of successful or unsuccessful call.
+ :paramtype success: bool
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(RemoteDependencyData, self).__init__(**kwargs)
+ self.id = kwargs.get("id", None)
+ self.name = kwargs["name"]
+ self.result_code = kwargs.get("result_code", None)
+ self.data = kwargs.get("data", None)
+ self.type = kwargs.get("type", None)
+ self.target = kwargs.get("target", None)
+ self.duration = kwargs["duration"]
+ self.success = kwargs.get("success", True)
+ self.properties = kwargs.get("properties", None)
+ self.measurements = kwargs.get("measurements", None)
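+
+# Illustrative sketch (hypothetical values): note that "success" defaults to
+# True in __init__ above, unlike most optional fields, which default to None:
+#
+#     dependency = RemoteDependencyData(
+#         name="GET /api/orders",
+#         duration="00.00:00:00.152000",
+#         type="HTTP",
+#         result_code="200",
+#     )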
+
+
+class RequestData(MonitorDomain):
+ """An instance of Request represents completion of an external request to the application to do work and contains a summary of that request execution and the results.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Required. Identifier of a request call instance. Used for correlation between request
+ and other telemetry items.
+ :vartype id: str
+ :ivar name: Name of the request. Represents code path taken to process request. Low cardinality
+ value to allow better grouping of requests. For HTTP requests it represents the HTTP method and
+ URL path template like 'GET /values/{id}'.
+ :vartype name: str
+ :ivar duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
+ 1000 days.
+ :vartype duration: str
+ :ivar success: Required. Indication of successful or unsuccessful call.
+ :vartype success: bool
+ :ivar response_code: Required. Result of a request execution. HTTP status code for HTTP
+ requests.
+ :vartype response_code: str
+ :ivar source: Source of the request. Examples are the instrumentation key of the caller or the
+ IP address of the caller.
+ :vartype source: str
+ :ivar url: Request URL with all query string parameters.
+ :vartype url: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"required": True, "max_length": 512, "min_length": 0},
+ "name": {"max_length": 1024, "min_length": 0},
+ "duration": {"required": True},
+ "success": {"required": True},
+ "response_code": {"required": True, "max_length": 1024, "min_length": 0},
+ "source": {"max_length": 1024, "min_length": 0},
+ "url": {"max_length": 2048, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "success": {"key": "success", "type": "bool"},
+ "response_code": {"key": "responseCode", "type": "str"},
+ "source": {"key": "source", "type": "str"},
+ "url": {"key": "url", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Required. Identifier of a request call instance. Used for correlation between
+ request and other telemetry items.
+ :paramtype id: str
+ :keyword name: Name of the request. Represents code path taken to process request. Low
+ cardinality value to allow better grouping of requests. For HTTP requests it represents the
+ HTTP method and URL path template like 'GET /values/{id}'.
+ :paramtype name: str
+ :keyword duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
+ 1000 days.
+ :paramtype duration: str
+ :keyword success: Required. Indication of successful or unsuccessful call.
+ :paramtype success: bool
+ :keyword response_code: Required. Result of a request execution. HTTP status code for HTTP
+ requests.
+ :paramtype response_code: str
+ :keyword source: Source of the request. Examples are the instrumentation key of the caller or
+ the IP address of the caller.
+ :paramtype source: str
+ :keyword url: Request URL with all query string parameters.
+ :paramtype url: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(RequestData, self).__init__(**kwargs)
+ self.id = kwargs["id"]
+ self.name = kwargs.get("name", None)
+ self.duration = kwargs["duration"]
+ self.success = kwargs.get("success", True)
+ self.response_code = kwargs["response_code"]
+ self.source = kwargs.get("source", None)
+ self.url = kwargs.get("url", None)
+ self.properties = kwargs.get("properties", None)
+ self.measurements = kwargs.get("measurements", None)
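+
+# Illustrative sketch (hypothetical values): although _validation marks
+# "success" as required, __init__ falls back to True when it is omitted:
+#
+#     request = RequestData(
+#         id="req-1",
+#         duration="00.00:00:00.034000",
+#         response_code="200",
+#         success=True,
+#     )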
+
+
+class StackFrame(msrest.serialization.Model):
+ """Stack frame information.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar level: Required.
+ :vartype level: int
+ :ivar method: Required. Method name.
+ :vartype method: str
+ :ivar assembly: Name of the assembly (dll, jar, etc.) containing this function.
+ :vartype assembly: str
+ :ivar file_name: File name or URL of the method implementation.
+ :vartype file_name: str
+ :ivar line: Line number of the code implementation.
+ :vartype line: int
+ """
+
+ _validation = {
+ "level": {"required": True},
+ "method": {"required": True, "max_length": 1024, "min_length": 0},
+ "assembly": {"max_length": 1024, "min_length": 0},
+ "file_name": {"max_length": 1024, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "level": {"key": "level", "type": "int"},
+ "method": {"key": "method", "type": "str"},
+ "assembly": {"key": "assembly", "type": "str"},
+ "file_name": {"key": "fileName", "type": "str"},
+ "line": {"key": "line", "type": "int"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword level: Required.
+ :paramtype level: int
+ :keyword method: Required. Method name.
+ :paramtype method: str
+ :keyword assembly: Name of the assembly (dll, jar, etc.) containing this function.
+ :paramtype assembly: str
+ :keyword file_name: File name or URL of the method implementation.
+ :paramtype file_name: str
+ :keyword line: Line number of the code implementation.
+ :paramtype line: int
+ """
+ super(StackFrame, self).__init__(**kwargs)
+ self.level = kwargs["level"]
+ self.method = kwargs["method"]
+ self.assembly = kwargs.get("assembly", None)
+ self.file_name = kwargs.get("file_name", None)
+ self.line = kwargs.get("line", None)
+
+
+class TelemetryErrorDetails(msrest.serialization.Model):
+ """The error details.
+
+ :ivar index: The index in the original payload of the item.
+ :vartype index: int
+ :ivar status_code: The item-specific HTTP response status code.
+ :vartype status_code: int
+ :ivar message: The error message.
+ :vartype message: str
+ """
+
+ _attribute_map = {
+ "index": {"key": "index", "type": "int"},
+ "status_code": {"key": "statusCode", "type": "int"},
+ "message": {"key": "message", "type": "str"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword index: The index in the original payload of the item.
+ :paramtype index: int
+ :keyword status_code: The item-specific HTTP response status code.
+ :paramtype status_code: int
+ :keyword message: The error message.
+ :paramtype message: str
+ """
+ super(TelemetryErrorDetails, self).__init__(**kwargs)
+ self.index = kwargs.get("index", None)
+ self.status_code = kwargs.get("status_code", None)
+ self.message = kwargs.get("message", None)
+
+
+class TelemetryEventData(MonitorDomain):
+ """Instances of Event represent structured event records that can be grouped and searched by their properties. Event data item also creates a metric of event count by name.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
+ metrics.
+ :vartype name: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "name": {"required": True, "max_length": 512, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "name": {"key": "name", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword name: Required. Event name. Keep it low cardinality to allow proper grouping and
+ useful metrics.
+ :paramtype name: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(TelemetryEventData, self).__init__(**kwargs)
+ self.name = kwargs["name"]
+ self.properties = kwargs.get("properties", None)
+ self.measurements = kwargs.get("measurements", None)
+
+
+class TelemetryExceptionData(MonitorDomain):
+ """An instance of Exception represents a handled or unhandled exception that occurred during execution of the monitored application.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar exceptions: Required. Exception chain - list of inner exceptions.
+ :vartype exceptions: list[~azure_monitor_client.models.TelemetryExceptionDetails]
+ :ivar severity_level: Severity level. Mostly used to indicate exception severity level when it
+ is reported by logging library. Possible values include: "Verbose", "Information", "Warning",
+ "Error", "Critical".
+ :vartype severity_level: str or ~azure_monitor_client.models.SeverityLevel
+ :ivar problem_id: Identifier of where the exception was thrown in code. Used for exceptions
+ grouping. Typically a combination of exception type and a function from the call stack.
+ :vartype problem_id: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "exceptions": {"required": True},
+ "problem_id": {"max_length": 1024, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "exceptions": {"key": "exceptions", "type": "[TelemetryExceptionDetails]"},
+ "severity_level": {"key": "severityLevel", "type": "str"},
+ "problem_id": {"key": "problemId", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword exceptions: Required. Exception chain - list of inner exceptions.
+ :paramtype exceptions: list[~azure_monitor_client.models.TelemetryExceptionDetails]
+ :keyword severity_level: Severity level. Mostly used to indicate exception severity level when
+ it is reported by logging library. Possible values include: "Verbose", "Information",
+ "Warning", "Error", "Critical".
+ :paramtype severity_level: str or ~azure_monitor_client.models.SeverityLevel
+ :keyword problem_id: Identifier of where the exception was thrown in code. Used for exceptions
+ grouping. Typically a combination of exception type and a function from the call stack.
+ :paramtype problem_id: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(TelemetryExceptionData, self).__init__(**kwargs)
+ self.exceptions = kwargs["exceptions"]
+ self.severity_level = kwargs.get("severity_level", None)
+ self.problem_id = kwargs.get("problem_id", None)
+ self.properties = kwargs.get("properties", None)
+ self.measurements = kwargs.get("measurements", None)
+
+
+class TelemetryExceptionDetails(msrest.serialization.Model):
+ """Exception details of the exception in a chain.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: In case exception is nested (outer exception contains inner one), the id and outerId
+ properties are used to represent the nesting.
+ :vartype id: int
+ :ivar outer_id: The value of outerId is a reference to an element in ExceptionDetails that
+ represents the outer exception.
+ :vartype outer_id: int
+ :ivar type_name: Exception type name.
+ :vartype type_name: str
+ :ivar message: Required. Exception message.
+ :vartype message: str
+ :ivar has_full_stack: Indicates if full exception stack is provided in the exception. The stack
+ may be trimmed, such as in the case of a StackOverflow exception.
+ :vartype has_full_stack: bool
+ :ivar stack: Text describing the stack. Either stack or parsedStack should have a value.
+ :vartype stack: str
+ :ivar parsed_stack: List of stack frames. Either stack or parsedStack should have a value.
+ :vartype parsed_stack: list[~azure_monitor_client.models.StackFrame]
+ """
+
+ _validation = {
+ "type_name": {"max_length": 1024, "min_length": 0},
+ "message": {"required": True, "max_length": 32768, "min_length": 0},
+ "stack": {"max_length": 32768, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "int"},
+ "outer_id": {"key": "outerId", "type": "int"},
+ "type_name": {"key": "typeName", "type": "str"},
+ "message": {"key": "message", "type": "str"},
+ "has_full_stack": {"key": "hasFullStack", "type": "bool"},
+ "stack": {"key": "stack", "type": "str"},
+ "parsed_stack": {"key": "parsedStack", "type": "[StackFrame]"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword id: In case exception is nested (outer exception contains inner one), the id and
+ outerId properties are used to represent the nesting.
+ :paramtype id: int
+ :keyword outer_id: The value of outerId is a reference to an element in ExceptionDetails that
+ represents the outer exception.
+ :paramtype outer_id: int
+ :keyword type_name: Exception type name.
+ :paramtype type_name: str
+ :keyword message: Required. Exception message.
+ :paramtype message: str
+ :keyword has_full_stack: Indicates if full exception stack is provided in the exception. The
+ stack may be trimmed, such as in the case of a StackOverflow exception.
+ :paramtype has_full_stack: bool
+ :keyword stack: Text describing the stack. Either stack or parsedStack should have a value.
+ :paramtype stack: str
+ :keyword parsed_stack: List of stack frames. Either stack or parsedStack should have a value.
+ :paramtype parsed_stack: list[~azure_monitor_client.models.StackFrame]
+ """
+ super(TelemetryExceptionDetails, self).__init__(**kwargs)
+ self.id = kwargs.get("id", None)
+ self.outer_id = kwargs.get("outer_id", None)
+ self.type_name = kwargs.get("type_name", None)
+ self.message = kwargs["message"]
+ self.has_full_stack = kwargs.get("has_full_stack", True)
+ self.stack = kwargs.get("stack", None)
+ self.parsed_stack = kwargs.get("parsed_stack", None)
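+
+# Illustrative sketch (hypothetical values): an exception payload chains
+# TelemetryExceptionData -> TelemetryExceptionDetails -> StackFrame:
+#
+#     exc_data = TelemetryExceptionData(
+#         exceptions=[
+#             TelemetryExceptionDetails(
+#                 message="division by zero",
+#                 type_name="ZeroDivisionError",
+#                 parsed_stack=[StackFrame(level=0, method="compute_ratio")],
+#             )
+#         ],
+#         severity_level="Error",
+#     )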
+
+
+class TelemetryItem(msrest.serialization.Model):
+ """System variables for a telemetry item.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar version: Envelope version. For internal use only. By assigning this the default, it will
+ not be serialized within the payload unless changed to a value other than 1.
+ :vartype version: int
+ :ivar name: Required. Type name of telemetry data item.
+ :vartype name: str
+ :ivar time: Required. Event date time when telemetry item was created. This is the wall clock
+ time on the client when the event was generated. There is no guarantee that the client's time
+ is accurate. This field must be formatted in UTC ISO 8601 format, with a trailing 'Z'
+ character, as described publicly on https://en.wikipedia.org/wiki/ISO_8601#UTC. Note: the
+ number of decimal seconds digits provided are variable (and unspecified). Consumers should
+ handle this, i.e. managed code consumers should not use format 'O' for parsing as it specifies
+ a fixed length. Example: 2009-06-15T13:45:30.0000000Z.
+ :vartype time: ~datetime.datetime
+ :ivar sample_rate: Sampling rate used in application. This telemetry item represents 100 /
+ sampleRate actual telemetry items.
+ :vartype sample_rate: float
+ :ivar sequence: Sequence field used to track absolute order of uploaded events.
+ :vartype sequence: str
+ :ivar instrumentation_key: The instrumentation key of the Application Insights resource.
+ :vartype instrumentation_key: str
+ :ivar tags: A set of tags. Key/value collection of context properties. See ContextTagKeys for
+ information on available properties.
+ :vartype tags: dict[str, str]
+ :ivar data: Telemetry data item.
+ :vartype data: ~azure_monitor_client.models.MonitorBase
+ """
+
+ _validation = {
+ "name": {"required": True},
+ "time": {"required": True},
+ "sequence": {"max_length": 64, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "version": {"key": "ver", "type": "int"},
+ "name": {"key": "name", "type": "str"},
+ "time": {"key": "time", "type": "iso-8601"},
+ "sample_rate": {"key": "sampleRate", "type": "float"},
+ "sequence": {"key": "seq", "type": "str"},
+ "instrumentation_key": {"key": "iKey", "type": "str"},
+ "tags": {"key": "tags", "type": "{str}"},
+ "data": {"key": "data", "type": "MonitorBase"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword version: Envelope version. For internal use only. By assigning this the default, it
+ will not be serialized within the payload unless changed to a value other than 1.
+ :paramtype version: int
+ :keyword name: Required. Type name of telemetry data item.
+ :paramtype name: str
+ :keyword time: Required. Event date time when telemetry item was created. This is the wall
+ clock time on the client when the event was generated. There is no guarantee that the client's
+ time is accurate. This field must be formatted in UTC ISO 8601 format, with a trailing 'Z'
+ character, as described publicly on https://en.wikipedia.org/wiki/ISO_8601#UTC. Note: the
+ number of decimal seconds digits provided are variable (and unspecified). Consumers should
+ handle this, i.e. managed code consumers should not use format 'O' for parsing as it specifies
+ a fixed length. Example: 2009-06-15T13:45:30.0000000Z.
+ :paramtype time: ~datetime.datetime
+ :keyword sample_rate: Sampling rate used in application. This telemetry item represents 100 /
+ sampleRate actual telemetry items.
+ :paramtype sample_rate: float
+ :keyword sequence: Sequence field used to track absolute order of uploaded events.
+ :paramtype sequence: str
+ :keyword instrumentation_key: The instrumentation key of the Application Insights resource.
+ :paramtype instrumentation_key: str
+ :keyword tags: A set of tags. Key/value collection of context properties. See ContextTagKeys
+ for information on available properties.
+ :paramtype tags: dict[str, str]
+ :keyword data: Telemetry data item.
+ :paramtype data: ~azure_monitor_client.models.MonitorBase
+ """
+ super(TelemetryItem, self).__init__(**kwargs)
+ self.version = kwargs.get("version", 1)
+ self.name = kwargs["name"]
+ self.time = kwargs["time"]
+ self.sample_rate = kwargs.get("sample_rate", 100)
+ self.sequence = kwargs.get("sequence", None)
+ self.instrumentation_key = kwargs.get("instrumentation_key", None)
+ self.tags = kwargs.get("tags", None)
+ self.data = kwargs.get("data", None)
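+
+# Illustrative sketch (hypothetical values): the envelope wraps a payload in
+# MonitorBase; a timezone-aware UTC datetime keeps the serialized "time" in the
+# ISO 8601 form described above:
+#
+#     import datetime
+#     item = TelemetryItem(
+#         name="Message",
+#         time=datetime.datetime.now(datetime.timezone.utc),
+#         instrumentation_key="00000000-0000-0000-0000-000000000000",
+#         data=MonitorBase(base_type="MessageData", base_data=MessageData(message="hi")),
+#     )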
+
+
+class TrackResponse(msrest.serialization.Model):
+ """Response containing the status of each telemetry item.
+
+ :ivar items_received: The number of items received.
+ :vartype items_received: int
+ :ivar items_accepted: The number of items accepted.
+ :vartype items_accepted: int
+ :ivar errors: An array of error detail objects.
+ :vartype errors: list[~azure_monitor_client.models.TelemetryErrorDetails]
+ """
+
+ _attribute_map = {
+ "items_received": {"key": "itemsReceived", "type": "int"},
+ "items_accepted": {"key": "itemsAccepted", "type": "int"},
+ "errors": {"key": "errors", "type": "[TelemetryErrorDetails]"},
+ }
+
+ def __init__(self, **kwargs):
+ """
+ :keyword items_received: The number of items received.
+ :paramtype items_received: int
+ :keyword items_accepted: The number of items accepted.
+ :paramtype items_accepted: int
+ :keyword errors: An array of error detail objects.
+ :paramtype errors: list[~azure_monitor_client.models.TelemetryErrorDetails]
+ """
+ super(TrackResponse, self).__init__(**kwargs)
+ self.items_received = kwargs.get("items_received", None)
+ self.items_accepted = kwargs.get("items_accepted", None)
+ self.errors = kwargs.get("errors", None)
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_models_py3.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_models_py3.py
new file mode 100644
index 00000000..9741154c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/models/_models_py3.py
@@ -0,0 +1,1342 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Any, Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_monitor_client_enums import *
+
+
+class MonitorDomain(msrest.serialization.Model):
+ """The abstract common base of all domains.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ """
+
+ _validation = {
+ "version": {"required": True},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ }
+
+ def __init__(self, *, version: int = 2, additional_properties: Optional[Dict[str, Any]] = None, **kwargs):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ """
+ super(MonitorDomain, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.version = version
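+
+# Illustrative note: this module mirrors _models.py but declares keyword-only,
+# type-annotated parameters instead of **kwargs lookups, so a call such as
+#
+#     domain = MonitorDomain(version=2)
+#
+# can be checked by static type checkers; runtime behavior is the same.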
+
+
+class AvailabilityData(MonitorDomain):
+ """Instances of AvailabilityData represent the result of executing an availability test.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Required. Identifier of a test run. Use it to correlate steps of test run and
+ telemetry generated by the service.
+ :vartype id: str
+ :ivar name: Required. Name of the test that these availability results represent.
+ :vartype name: str
+ :ivar duration: Required. Duration in format: DD.HH:MM:SS.MMMMMM. Must be less than 1000 days.
+ :vartype duration: str
+ :ivar success: Required. Success flag.
+ :vartype success: bool
+ :ivar run_location: Name of the location where the test was run from.
+ :vartype run_location: str
+ :ivar message: Diagnostic message for the result.
+ :vartype message: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"required": True, "max_length": 512, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "duration": {"required": True},
+ "success": {"required": True},
+ "run_location": {"max_length": 1024, "min_length": 0},
+ "message": {"max_length": 8192, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "success": {"key": "success", "type": "bool"},
+ "run_location": {"key": "runLocation", "type": "str"},
+ "message": {"key": "message", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ id: str,
+ name: str,
+ duration: str,
+ success: bool,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ run_location: Optional[str] = None,
+ message: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ measurements: Optional[Dict[str, float]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Required. Identifier of a test run. Use it to correlate steps of test run and
+ telemetry generated by the service.
+ :paramtype id: str
+ :keyword name: Required. Name of the test that these availability results represent.
+ :paramtype name: str
+ :keyword duration: Required. Duration in format: DD.HH:MM:SS.MMMMMM. Must be less than 1000
+ days.
+ :paramtype duration: str
+ :keyword success: Required. Success flag.
+ :paramtype success: bool
+ :keyword run_location: Name of the location where the test was run from.
+ :paramtype run_location: str
+ :keyword message: Diagnostic message for the result.
+ :paramtype message: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(AvailabilityData, self).__init__(additional_properties=additional_properties, version=version, **kwargs)
+ self.id = id
+ self.name = name
+ self.duration = duration
+ self.success = success
+ self.run_location = run_location
+ self.message = message
+ self.properties = properties
+ self.measurements = measurements
+
+
+class MessageData(MonitorDomain):
+ """Instances of Message represent printf-like trace statements that are text-searched. Log4Net, NLog and other text-based log file entries are translated into instances of this type. The message does not have measurements.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar message: Required. Trace message.
+ :vartype message: str
+ :ivar severity_level: Trace severity level. Possible values include: "Verbose", "Information",
+ "Warning", "Error", "Critical".
+ :vartype severity_level: str or ~azure_monitor_client.models.SeverityLevel
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "message": {"required": True, "max_length": 32768, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "message": {"key": "message", "type": "str"},
+ "severity_level": {"key": "severityLevel", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ message: str,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ severity_level: Optional[Union[str, "SeverityLevel"]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ measurements: Optional[Dict[str, float]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword message: Required. Trace message.
+ :paramtype message: str
+ :keyword severity_level: Trace severity level. Possible values include: "Verbose",
+ "Information", "Warning", "Error", "Critical".
+ :paramtype severity_level: str or ~azure_monitor_client.models.SeverityLevel
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(MessageData, self).__init__(additional_properties=additional_properties, version=version, **kwargs)
+ self.message = message
+ self.severity_level = severity_level
+ self.properties = properties
+ self.measurements = measurements
+
+
+class MetricDataPoint(msrest.serialization.Model):
+ """Metric data single measurement.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar namespace: Namespace of the metric.
+ :vartype namespace: str
+ :ivar name: Required. Name of the metric.
+ :vartype name: str
+ :ivar data_point_type: Metric type. Single measurement or the aggregated value. Possible values
+ include: "Measurement", "Aggregation".
+ :vartype data_point_type: str or ~azure_monitor_client.models.DataPointType
+ :ivar value: Required. Single value for measurement. Sum of individual measurements for the
+ aggregation.
+ :vartype value: float
+ :ivar count: Metric weight of the aggregated metric. Should not be set for a measurement.
+ :vartype count: int
+ :ivar min: Minimum value of the aggregated metric. Should not be set for a measurement.
+ :vartype min: float
+ :ivar max: Maximum value of the aggregated metric. Should not be set for a measurement.
+ :vartype max: float
+ :ivar std_dev: Standard deviation of the aggregated metric. Should not be set for a
+ measurement.
+ :vartype std_dev: float
+ """
+
+ _validation = {
+ "namespace": {"max_length": 256, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "value": {"required": True},
+ }
+
+ _attribute_map = {
+ "namespace": {"key": "ns", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "data_point_type": {"key": "kind", "type": "str"},
+ "value": {"key": "value", "type": "float"},
+ "count": {"key": "count", "type": "int"},
+ "min": {"key": "min", "type": "float"},
+ "max": {"key": "max", "type": "float"},
+ "std_dev": {"key": "stdDev", "type": "float"},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ value: float,
+ namespace: Optional[str] = None,
+ data_point_type: Optional[Union[str, "DataPointType"]] = None,
+ count: Optional[int] = None,
+ min: Optional[float] = None,
+ max: Optional[float] = None,
+ std_dev: Optional[float] = None,
+ **kwargs
+ ):
+ """
+ :keyword namespace: Namespace of the metric.
+ :paramtype namespace: str
+ :keyword name: Required. Name of the metric.
+ :paramtype name: str
+ :keyword data_point_type: Metric type. Single measurement or the aggregated value. Possible
+ values include: "Measurement", "Aggregation".
+ :paramtype data_point_type: str or ~azure_monitor_client.models.DataPointType
+ :keyword value: Required. Single value for measurement. Sum of individual measurements for the
+ aggregation.
+ :paramtype value: float
+ :keyword count: Metric weight of the aggregated metric. Should not be set for a measurement.
+ :paramtype count: int
+ :keyword min: Minimum value of the aggregated metric. Should not be set for a measurement.
+ :paramtype min: float
+ :keyword max: Maximum value of the aggregated metric. Should not be set for a measurement.
+ :paramtype max: float
+ :keyword std_dev: Standard deviation of the aggregated metric. Should not be set for a
+ measurement.
+ :paramtype std_dev: float
+ """
+ super(MetricDataPoint, self).__init__(**kwargs)
+ self.namespace = namespace
+ self.name = name
+ self.data_point_type = data_point_type
+ self.value = value
+ self.count = count
+ self.min = min
+ self.max = max
+ self.std_dev = std_dev
+
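+# Illustrative sketch (not generated code): a raw measurement carries only a
+# name and value, while an aggregation also sets count/min/max/std_dev and
+# reports the sum as its value. All numbers below are hypothetical.
+#
+#   single = MetricDataPoint(name="queue_depth", value=7.0,
+#                            data_point_type="Measurement")
+#   aggregate = MetricDataPoint(name="latency_ms", value=120.0, count=3,
+#                               min=20.0, max=60.0, std_dev=16.3,
+#                               data_point_type="Aggregation")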
+
+class MetricsData(MonitorDomain):
+ """An instance of the Metric item is a list of measurements (single data points) and/or aggregations.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar metrics: Required. List of metrics. Only one metric in the list is currently supported by
+ Application Insights storage. If multiple data points were sent, only the first one will be
+ used.
+ :vartype metrics: list[~azure_monitor_client.models.MetricDataPoint]
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "metrics": {"required": True},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "metrics": {"key": "metrics", "type": "[MetricDataPoint]"},
+ "properties": {"key": "properties", "type": "{str}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ metrics: List["MetricDataPoint"],
+ additional_properties: Optional[Dict[str, Any]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword metrics: Required. List of metrics. Only one metric in the list is currently supported
+ by Application Insights storage. If multiple data points were sent, only the first one will be
+ used.
+ :paramtype metrics: list[~azure_monitor_client.models.MetricDataPoint]
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ """
+ super(MetricsData, self).__init__(additional_properties=additional_properties, version=version, **kwargs)
+ self.metrics = metrics
+ self.properties = properties
+
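+# Illustrative sketch: since Application Insights stores only the first point,
+# a MetricsData payload is typically built around a single MetricDataPoint
+# (names and values are hypothetical).
+#
+#   data = MetricsData(metrics=[MetricDataPoint(name="queue_depth", value=7.0)])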
+
+class MonitorBase(msrest.serialization.Model):
+ """Data struct to contain only C section with custom fields.
+
+ :ivar base_type: Name of item (B section) if any. If telemetry data is derived straight from
+ this, this should be null.
+ :vartype base_type: str
+ :ivar base_data: The data payload for the telemetry request.
+ :vartype base_data: ~azure_monitor_client.models.MonitorDomain
+ """
+
+ _attribute_map = {
+ "base_type": {"key": "baseType", "type": "str"},
+ "base_data": {"key": "baseData", "type": "MonitorDomain"},
+ }
+
+ def __init__(self, *, base_type: Optional[str] = None, base_data: Optional["MonitorDomain"] = None, **kwargs):
+ """
+ :keyword base_type: Name of item (B section) if any. If telemetry data is derived straight from
+ this, this should be null.
+ :paramtype base_type: str
+ :keyword base_data: The data payload for the telemetry request.
+ :paramtype base_data: ~azure_monitor_client.models.MonitorDomain
+ """
+ super(MonitorBase, self).__init__(**kwargs)
+ self.base_type = base_type
+ self.base_data = base_data
+
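+# Illustrative sketch: base_type names the concrete domain type carried in
+# base_data, so receivers know how to deserialize the payload (values below
+# are hypothetical).
+#
+#   base = MonitorBase(base_type="MessageData",
+#                      base_data=MessageData(message="hello"))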
+
+class PageViewData(MonitorDomain):
+ """An instance of PageView represents a generic action on a page like a button click. It is also the base type for PageView.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Required. Identifier of a page view instance. Used for correlation between page view
+ and other telemetry items.
+ :vartype id: str
+ :ivar name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
+ metrics.
+ :vartype name: str
+ :ivar url: Request URL with all query string parameters.
+ :vartype url: str
+ :ivar duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view (PageViewData),
+ this is the duration. For a page view with performance information (PageViewPerfData), this is
+ the page load time. Must be less than 1000 days.
+ :vartype duration: str
+ :ivar referred_uri: Fully qualified page URI or URL of the referring page; if unknown, leave
+ blank.
+ :vartype referred_uri: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"required": True, "max_length": 512, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "url": {"max_length": 2048, "min_length": 0},
+ "referred_uri": {"max_length": 2048, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "url": {"key": "url", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "referred_uri": {"key": "referredUri", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ id: str,
+ name: str,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ url: Optional[str] = None,
+ duration: Optional[str] = None,
+ referred_uri: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ measurements: Optional[Dict[str, float]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Required. Identifier of a page view instance. Used for correlation between page
+ view and other telemetry items.
+ :paramtype id: str
+ :keyword name: Required. Event name. Keep it low cardinality to allow proper grouping and
+ useful metrics.
+ :paramtype name: str
+ :keyword url: Request URL with all query string parameters.
+ :paramtype url: str
+ :keyword duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view
+ (PageViewData), this is the duration. For a page view with performance information
+ (PageViewPerfData), this is the page load time. Must be less than 1000 days.
+ :paramtype duration: str
+ :keyword referred_uri: Fully qualified page URI or URL of the referring page; if unknown, leave
+ blank.
+ :paramtype referred_uri: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(PageViewData, self).__init__(additional_properties=additional_properties, version=version, **kwargs)
+ self.id = id
+ self.name = name
+ self.url = url
+ self.duration = duration
+ self.referred_uri = referred_uri
+ self.properties = properties
+ self.measurements = measurements
+
+
+class PageViewPerfData(MonitorDomain):
+ """An instance of PageViewPerf represents: a page view with no performance data, a page view with performance data, or just the performance data of an earlier page request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Required. Identifier of a page view instance. Used for correlation between page view
+ and other telemetry items.
+ :vartype id: str
+ :ivar name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
+ metrics.
+ :vartype name: str
+ :ivar url: Request URL with all query string parameters.
+ :vartype url: str
+ :ivar duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view (PageViewData),
+ this is the duration. For a page view with performance information (PageViewPerfData), this is
+ the page load time. Must be less than 1000 days.
+ :vartype duration: str
+ :ivar perf_total: Performance total in TimeSpan 'G' (general long) format: d:hh:mm:ss.fffffff.
+ :vartype perf_total: str
+ :ivar network_connect: Network connection time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :vartype network_connect: str
+ :ivar sent_request: Sent request time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :vartype sent_request: str
+ :ivar received_response: Received response time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :vartype received_response: str
+ :ivar dom_processing: DOM processing time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :vartype dom_processing: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"required": True, "max_length": 512, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "url": {"max_length": 2048, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "url": {"key": "url", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "perf_total": {"key": "perfTotal", "type": "str"},
+ "network_connect": {"key": "networkConnect", "type": "str"},
+ "sent_request": {"key": "sentRequest", "type": "str"},
+ "received_response": {"key": "receivedResponse", "type": "str"},
+ "dom_processing": {"key": "domProcessing", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ id: str,
+ name: str,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ url: Optional[str] = None,
+ duration: Optional[str] = None,
+ perf_total: Optional[str] = None,
+ network_connect: Optional[str] = None,
+ sent_request: Optional[str] = None,
+ received_response: Optional[str] = None,
+ dom_processing: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ measurements: Optional[Dict[str, float]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Required. Identifier of a page view instance. Used for correlation between page
+ view and other telemetry items.
+ :paramtype id: str
+ :keyword name: Required. Event name. Keep it low cardinality to allow proper grouping and
+ useful metrics.
+ :paramtype name: str
+ :keyword url: Request URL with all query string parameters.
+ :paramtype url: str
+ :keyword duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view
+ (PageViewData), this is the duration. For a page view with performance information
+ (PageViewPerfData), this is the page load time. Must be less than 1000 days.
+ :paramtype duration: str
+ :keyword perf_total: Performance total in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype perf_total: str
+ :keyword network_connect: Network connection time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype network_connect: str
+ :keyword sent_request: Sent request time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype sent_request: str
+ :keyword received_response: Received response time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype received_response: str
+ :keyword dom_processing: DOM processing time in TimeSpan 'G' (general long) format:
+ d:hh:mm:ss.fffffff.
+ :paramtype dom_processing: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(PageViewPerfData, self).__init__(additional_properties=additional_properties, version=version, **kwargs)
+ self.id = id
+ self.name = name
+ self.url = url
+ self.duration = duration
+ self.perf_total = perf_total
+ self.network_connect = network_connect
+ self.sent_request = sent_request
+ self.received_response = received_response
+ self.dom_processing = dom_processing
+ self.properties = properties
+ self.measurements = measurements
+
+
+class RemoteDependencyData(MonitorDomain):
+ """An instance of Remote Dependency represents an interaction of the monitored component with a remote component/service like SQL or an HTTP endpoint.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Identifier of a dependency call instance. Used for correlation with the request
+ telemetry item corresponding to this dependency call.
+ :vartype id: str
+ :ivar name: Required. Name of the command initiated with this dependency call. Low cardinality
+ value. Examples are stored procedure name and URL path template.
+ :vartype name: str
+ :ivar result_code: Result code of a dependency call. Examples are SQL error code and HTTP
+ status code.
+ :vartype result_code: str
+ :ivar data: Command initiated by this dependency call. Examples are SQL statement and HTTP URL
+ with all query parameters.
+ :vartype data: str
+ :ivar type: Dependency type name. Very low cardinality value for logical grouping of
+ dependencies and interpretation of other fields like commandName and resultCode. Examples are
+ SQL, Azure table, and HTTP.
+ :vartype type: str
+ :ivar target: Target site of a dependency call. Examples are server name, host address.
+ :vartype target: str
+ :ivar duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
+ 1000 days.
+ :vartype duration: str
+ :ivar success: Indication of successful or unsuccessful call.
+ :vartype success: bool
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"max_length": 512, "min_length": 0},
+ "name": {"required": True, "max_length": 1024, "min_length": 0},
+ "result_code": {"max_length": 1024, "min_length": 0},
+ "data": {"max_length": 8192, "min_length": 0},
+ "type": {"max_length": 1024, "min_length": 0},
+ "target": {"max_length": 1024, "min_length": 0},
+ "duration": {"required": True},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "result_code": {"key": "resultCode", "type": "str"},
+ "data": {"key": "data", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "target": {"key": "target", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "success": {"key": "success", "type": "bool"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ name: str,
+ duration: str,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ id: Optional[str] = None,
+ result_code: Optional[str] = None,
+ data: Optional[str] = None,
+ type: Optional[str] = None,
+ target: Optional[str] = None,
+ success: Optional[bool] = True,
+ properties: Optional[Dict[str, str]] = None,
+ measurements: Optional[Dict[str, float]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Identifier of a dependency call instance. Used for correlation with the request
+ telemetry item corresponding to this dependency call.
+ :paramtype id: str
+ :keyword name: Required. Name of the command initiated with this dependency call. Low
+ cardinality value. Examples are stored procedure name and URL path template.
+ :paramtype name: str
+ :keyword result_code: Result code of a dependency call. Examples are SQL error code and HTTP
+ status code.
+ :paramtype result_code: str
+ :keyword data: Command initiated by this dependency call. Examples are SQL statement and HTTP
+ URL with all query parameters.
+ :paramtype data: str
+ :keyword type: Dependency type name. Very low cardinality value for logical grouping of
+ dependencies and interpretation of other fields like commandName and resultCode. Examples are
+ SQL, Azure table, and HTTP.
+ :paramtype type: str
+ :keyword target: Target site of a dependency call. Examples are server name, host address.
+ :paramtype target: str
+ :keyword duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
+ 1000 days.
+ :paramtype duration: str
+ :keyword success: Indication of successful or unsuccessful call.
+ :paramtype success: bool
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(RemoteDependencyData, self).__init__(
+ additional_properties=additional_properties, version=version, **kwargs
+ )
+ self.id = id
+ self.name = name
+ self.result_code = result_code
+ self.data = data
+ self.type = type
+ self.target = target
+ self.duration = duration
+ self.success = success
+ self.properties = properties
+ self.measurements = measurements
+
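+# Illustrative sketch: duration uses the DD.HH:MM:SS.MMMMMM string form, so a
+# 125 ms HTTP call is "0.00:00:00.125000" (all values below are hypothetical).
+#
+#   dep = RemoteDependencyData(name="GET /users", duration="0.00:00:00.125000",
+#                              type="HTTP", target="api.contoso.example",
+#                              result_code="200", success=True)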
+
+class RequestData(MonitorDomain):
+ """An instance of Request represents completion of an external request to the application to do work and contains a summary of that request execution and the results.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar id: Required. Identifier of a request call instance. Used for correlation between request
+ and other telemetry items.
+ :vartype id: str
+ :ivar name: Name of the request. Represents code path taken to process request. Low cardinality
+ value to allow better grouping of requests. For HTTP requests it represents the HTTP method and
+ URL path template like 'GET /values/{id}'.
+ :vartype name: str
+ :ivar duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
+ 1000 days.
+ :vartype duration: str
+ :ivar success: Required. Indication of successful or unsuccessful call.
+ :vartype success: bool
+ :ivar response_code: Required. Result of a request execution. HTTP status code for HTTP
+ requests.
+ :vartype response_code: str
+ :ivar source: Source of the request. Examples are the instrumentation key of the caller or the
+ IP address of the caller.
+ :vartype source: str
+ :ivar url: Request URL with all query string parameters.
+ :vartype url: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "id": {"required": True, "max_length": 512, "min_length": 0},
+ "name": {"max_length": 1024, "min_length": 0},
+ "duration": {"required": True},
+ "success": {"required": True},
+ "response_code": {"required": True, "max_length": 1024, "min_length": 0},
+ "source": {"max_length": 1024, "min_length": 0},
+ "url": {"max_length": 2048, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "duration": {"key": "duration", "type": "str"},
+ "success": {"key": "success", "type": "bool"},
+ "response_code": {"key": "responseCode", "type": "str"},
+ "source": {"key": "source", "type": "str"},
+ "url": {"key": "url", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ id: str,
+ duration: str,
+ success: bool = True,
+ response_code: str,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ name: Optional[str] = None,
+ source: Optional[str] = None,
+ url: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ measurements: Optional[Dict[str, float]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword id: Required. Identifier of a request call instance. Used for correlation between
+ request and other telemetry items.
+ :paramtype id: str
+ :keyword name: Name of the request. Represents code path taken to process request. Low
+ cardinality value to allow better grouping of requests. For HTTP requests it represents the
+ HTTP method and URL path template like 'GET /values/{id}'.
+ :paramtype name: str
+ :keyword duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
+ 1000 days.
+ :paramtype duration: str
+ :keyword success: Required. Indication of successful or unsuccessful call.
+ :paramtype success: bool
+ :keyword response_code: Required. Result of a request execution. HTTP status code for HTTP
+ requests.
+ :paramtype response_code: str
+ :keyword source: Source of the request. Examples are the instrumentation key of the caller or
+ the IP address of the caller.
+ :paramtype source: str
+ :keyword url: Request URL with all query string parameters.
+ :paramtype url: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(RequestData, self).__init__(additional_properties=additional_properties, version=version, **kwargs)
+ self.id = id
+ self.name = name
+ self.duration = duration
+ self.success = success
+ self.response_code = response_code
+ self.source = source
+ self.url = url
+ self.properties = properties
+ self.measurements = measurements
+
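+# Illustrative sketch: a completed server request with the HTTP status carried
+# as its response_code (identifier and values are hypothetical).
+#
+#   req = RequestData(id="0123456789abcdef", duration="0.00:00:01.250000",
+#                     success=True, response_code="200",
+#                     name="GET /values/{id}",
+#                     url="https://contoso.example/values/42")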
+
+class StackFrame(msrest.serialization.Model):
+ """Stack frame information.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar level: Required. Level (index) of the frame in the call stack.
+ :vartype level: int
+ :ivar method: Required. Method name.
+ :vartype method: str
+ :ivar assembly: Name of the assembly (dll, jar, etc.) containing this function.
+ :vartype assembly: str
+ :ivar file_name: File name or URL of the method implementation.
+ :vartype file_name: str
+ :ivar line: Line number of the code implementation.
+ :vartype line: int
+ """
+
+ _validation = {
+ "level": {"required": True},
+ "method": {"required": True, "max_length": 1024, "min_length": 0},
+ "assembly": {"max_length": 1024, "min_length": 0},
+ "file_name": {"max_length": 1024, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "level": {"key": "level", "type": "int"},
+ "method": {"key": "method", "type": "str"},
+ "assembly": {"key": "assembly", "type": "str"},
+ "file_name": {"key": "fileName", "type": "str"},
+ "line": {"key": "line", "type": "int"},
+ }
+
+ def __init__(
+ self,
+ *,
+ level: int,
+ method: str,
+ assembly: Optional[str] = None,
+ file_name: Optional[str] = None,
+ line: Optional[int] = None,
+ **kwargs
+ ):
+ """
+ :keyword level: Required. Level (index) of the frame in the call stack.
+ :paramtype level: int
+ :keyword method: Required. Method name.
+ :paramtype method: str
+ :keyword assembly: Name of the assembly (dll, jar, etc.) containing this function.
+ :paramtype assembly: str
+ :keyword file_name: File name or URL of the method implementation.
+ :paramtype file_name: str
+ :keyword line: Line number of the code implementation.
+ :paramtype line: int
+ """
+ super(StackFrame, self).__init__(**kwargs)
+ self.level = level
+ self.method = method
+ self.assembly = assembly
+ self.file_name = file_name
+ self.line = line
+
+
+class TelemetryErrorDetails(msrest.serialization.Model):
+ """The error details.
+
+ :ivar index: The index in the original payload of the item.
+ :vartype index: int
+ :ivar status_code: The item specific `HTTP Response status code <#Response Status Codes>`_.
+ :vartype status_code: int
+ :ivar message: The error message.
+ :vartype message: str
+ """
+
+ _attribute_map = {
+ "index": {"key": "index", "type": "int"},
+ "status_code": {"key": "statusCode", "type": "int"},
+ "message": {"key": "message", "type": "str"},
+ }
+
+ def __init__(
+ self, *, index: Optional[int] = None, status_code: Optional[int] = None, message: Optional[str] = None, **kwargs
+ ):
+ """
+ :keyword index: The index in the original payload of the item.
+ :paramtype index: int
+ :keyword status_code: The item specific `HTTP Response status code <#Response Status Codes>`_.
+ :paramtype status_code: int
+ :keyword message: The error message.
+ :paramtype message: str
+ """
+ super(TelemetryErrorDetails, self).__init__(**kwargs)
+ self.index = index
+ self.status_code = status_code
+ self.message = message
+
+
+class TelemetryEventData(MonitorDomain):
+ """Instances of Event represent structured event records that can be grouped and searched by their properties. Event data item also creates a metric of event count by name.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
+ metrics.
+ :vartype name: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "name": {"required": True, "max_length": 512, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "name": {"key": "name", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ name: str,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ measurements: Optional[Dict[str, float]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword name: Required. Event name. Keep it low cardinality to allow proper grouping and
+ useful metrics.
+ :paramtype name: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(TelemetryEventData, self).__init__(additional_properties=additional_properties, version=version, **kwargs)
+ self.name = name
+ self.properties = properties
+ self.measurements = measurements
+
+
+class TelemetryExceptionData(MonitorDomain):
+ """An instance of Exception represents a handled or unhandled exception that occurred during execution of the monitored application.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, any]
+ :ivar version: Required. Schema version.
+ :vartype version: int
+ :ivar exceptions: Required. Exception chain - list of inner exceptions.
+ :vartype exceptions: list[~azure_monitor_client.models.TelemetryExceptionDetails]
+ :ivar severity_level: Severity level. Mostly used to indicate exception severity level when it
+ is reported by logging library. Possible values include: "Verbose", "Information", "Warning",
+ "Error", "Critical".
+ :vartype severity_level: str or ~azure_monitor_client.models.SeverityLevel
+ :ivar problem_id: Identifier of where the exception was thrown in code. Used for exceptions
+ grouping. Typically a combination of exception type and a function from the call stack.
+ :vartype problem_id: str
+ :ivar properties: Collection of custom properties.
+ :vartype properties: dict[str, str]
+ :ivar measurements: Collection of custom measurements.
+ :vartype measurements: dict[str, float]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "exceptions": {"required": True},
+ "problem_id": {"max_length": 1024, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "additional_properties": {"key": "", "type": "{object}"},
+ "version": {"key": "ver", "type": "int"},
+ "exceptions": {"key": "exceptions", "type": "[TelemetryExceptionDetails]"},
+ "severity_level": {"key": "severityLevel", "type": "str"},
+ "problem_id": {"key": "problemId", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "measurements": {"key": "measurements", "type": "{float}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: int = 2,
+ exceptions: List["TelemetryExceptionDetails"],
+ additional_properties: Optional[Dict[str, Any]] = None,
+ severity_level: Optional[Union[str, "SeverityLevel"]] = None,
+ problem_id: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ measurements: Optional[Dict[str, float]] = None,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, any]
+ :keyword version: Required. Schema version.
+ :paramtype version: int
+ :keyword exceptions: Required. Exception chain - list of inner exceptions.
+ :paramtype exceptions: list[~azure_monitor_client.models.TelemetryExceptionDetails]
+ :keyword severity_level: Severity level. Mostly used to indicate exception severity level when
+ it is reported by logging library. Possible values include: "Verbose", "Information",
+ "Warning", "Error", "Critical".
+ :paramtype severity_level: str or ~azure_monitor_client.models.SeverityLevel
+ :keyword problem_id: Identifier of where the exception was thrown in code. Used for exceptions
+ grouping. Typically a combination of exception type and a function from the call stack.
+ :paramtype problem_id: str
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: dict[str, str]
+ :keyword measurements: Collection of custom measurements.
+ :paramtype measurements: dict[str, float]
+ """
+ super(TelemetryExceptionData, self).__init__(
+ additional_properties=additional_properties, version=version, **kwargs
+ )
+ self.exceptions = exceptions
+ self.severity_level = severity_level
+ self.problem_id = problem_id
+ self.properties = properties
+ self.measurements = measurements
+
+
+class TelemetryExceptionDetails(msrest.serialization.Model):
+ """Exception details of the exception in a chain.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: In case exception is nested (outer exception contains inner one), the id and outerId
+ properties are used to represent the nesting.
+ :vartype id: int
+ :ivar outer_id: The value of outerId is a reference to an element in ExceptionDetails that
+ represents the outer exception.
+ :vartype outer_id: int
+ :ivar type_name: Exception type name.
+ :vartype type_name: str
+ :ivar message: Required. Exception message.
+ :vartype message: str
+ :ivar has_full_stack: Indicates if full exception stack is provided in the exception. The stack
+ may be trimmed, such as in the case of a StackOverflow exception.
+ :vartype has_full_stack: bool
+ :ivar stack: Text describing the stack. Either stack or parsedStack should have a value.
+ :vartype stack: str
+ :ivar parsed_stack: List of stack frames. Either stack or parsedStack should have a value.
+ :vartype parsed_stack: list[~azure_monitor_client.models.StackFrame]
+ """
+
+ _validation = {
+ "type_name": {"max_length": 1024, "min_length": 0},
+ "message": {"required": True, "max_length": 32768, "min_length": 0},
+ "stack": {"max_length": 32768, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "int"},
+ "outer_id": {"key": "outerId", "type": "int"},
+ "type_name": {"key": "typeName", "type": "str"},
+ "message": {"key": "message", "type": "str"},
+ "has_full_stack": {"key": "hasFullStack", "type": "bool"},
+ "stack": {"key": "stack", "type": "str"},
+ "parsed_stack": {"key": "parsedStack", "type": "[StackFrame]"},
+ }
+
+ def __init__(
+ self,
+ *,
+ message: str,
+ id: Optional[int] = None,
+ outer_id: Optional[int] = None,
+ type_name: Optional[str] = None,
+ has_full_stack: Optional[bool] = True,
+ stack: Optional[str] = None,
+ parsed_stack: Optional[List["StackFrame"]] = None,
+ **kwargs
+ ):
+ """
+ :keyword id: In case exception is nested (outer exception contains inner one), the id and
+ outerId properties are used to represent the nesting.
+ :paramtype id: int
+ :keyword outer_id: The value of outerId is a reference to an element in ExceptionDetails that
+ represents the outer exception.
+ :paramtype outer_id: int
+ :keyword type_name: Exception type name.
+ :paramtype type_name: str
+ :keyword message: Required. Exception message.
+ :paramtype message: str
+ :keyword has_full_stack: Indicates if full exception stack is provided in the exception. The
+ stack may be trimmed, such as in the case of a StackOverflow exception.
+ :paramtype has_full_stack: bool
+ :keyword stack: Text describing the stack. Either stack or parsedStack should have a value.
+ :paramtype stack: str
+ :keyword parsed_stack: List of stack frames. Either stack or parsedStack should have a value.
+ :paramtype parsed_stack: list[~azure_monitor_client.models.StackFrame]
+ """
+ super(TelemetryExceptionDetails, self).__init__(**kwargs)
+ self.id = id
+ self.outer_id = outer_id
+ self.type_name = type_name
+ self.message = message
+ self.has_full_stack = has_full_stack
+ self.stack = stack
+ self.parsed_stack = parsed_stack
+
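+# Illustrative sketch: nested exceptions chain through id/outer_id, with the
+# inner detail's outer_id pointing at the outer detail's id (values are
+# hypothetical).
+#
+#   outer = TelemetryExceptionDetails(id=1, type_name="RuntimeError",
+#                                     message="request processing failed")
+#   inner = TelemetryExceptionDetails(id=2, outer_id=1, type_name="OSError",
+#                                     message="connection reset")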
+
+class TelemetryItem(msrest.serialization.Model):
+ """System variables for a telemetry item.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar version: Envelope version. For internal use only. By assigning this the default, it will
+ not be serialized within the payload unless changed to a value other than 1.
+ :vartype version: int
+ :ivar name: Required. Type name of telemetry data item.
+ :vartype name: str
+ :ivar time: Required. Event date time when telemetry item was created. This is the wall clock
+ time on the client when the event was generated. There is no guarantee that the client's time
+ is accurate. This field must be formatted in UTC ISO 8601 format, with a trailing 'Z'
+ character, as described publicly on https://en.wikipedia.org/wiki/ISO_8601#UTC. Note: the
+ number of decimal seconds digits provided are variable (and unspecified). Consumers should
+ handle this, i.e. managed code consumers should not use format 'O' for parsing as it specifies
+ a fixed length. Example: 2009-06-15T13:45:30.0000000Z.
+ :vartype time: ~datetime.datetime
+ :ivar sample_rate: Sampling rate used in application. This telemetry item represents 100 /
+ sampleRate actual telemetry items.
+ :vartype sample_rate: float
+ :ivar sequence: Sequence field used to track absolute order of uploaded events.
+ :vartype sequence: str
+ :ivar instrumentation_key: The instrumentation key of the Application Insights resource.
+ :vartype instrumentation_key: str
+ :ivar tags: A set of tags. Key/value collection of context properties. See ContextTagKeys for
+ information on available properties.
+ :vartype tags: dict[str, str]
+ :ivar data: Telemetry data item.
+ :vartype data: ~azure_monitor_client.models.MonitorBase
+ """
+
+ _validation = {
+ "name": {"required": True},
+ "time": {"required": True},
+ "sequence": {"max_length": 64, "min_length": 0},
+ }
+
+ _attribute_map = {
+ "version": {"key": "ver", "type": "int"},
+ "name": {"key": "name", "type": "str"},
+ "time": {"key": "time", "type": "iso-8601"},
+ "sample_rate": {"key": "sampleRate", "type": "float"},
+ "sequence": {"key": "seq", "type": "str"},
+ "instrumentation_key": {"key": "iKey", "type": "str"},
+ "tags": {"key": "tags", "type": "{str}"},
+ "data": {"key": "data", "type": "MonitorBase"},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ time: datetime.datetime,
+ version: Optional[int] = 1,
+ sample_rate: Optional[float] = 100,
+ sequence: Optional[str] = None,
+ instrumentation_key: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ data: Optional["MonitorBase"] = None,
+ **kwargs
+ ):
+ """
+ :keyword version: Envelope version. For internal use only. By assigning this the default, it
+ will not be serialized within the payload unless changed to a value other than 1.
+ :paramtype version: int
+ :keyword name: Required. Type name of telemetry data item.
+ :paramtype name: str
+ :keyword time: Required. Event date time when telemetry item was created. This is the wall
+ clock time on the client when the event was generated. There is no guarantee that the client's
+ time is accurate. This field must be formatted in UTC ISO 8601 format, with a trailing 'Z'
+ character, as described publicly on https://en.wikipedia.org/wiki/ISO_8601#UTC. Note: the
+ number of decimal seconds digits provided are variable (and unspecified). Consumers should
+ handle this, i.e. managed code consumers should not use format 'O' for parsing as it specifies
+ a fixed length. Example: 2009-06-15T13:45:30.0000000Z.
+ :paramtype time: ~datetime.datetime
+ :keyword sample_rate: Sampling rate used in application. This telemetry item represents 100 /
+ sampleRate actual telemetry items.
+ :paramtype sample_rate: float
+ :keyword sequence: Sequence field used to track absolute order of uploaded events.
+ :paramtype sequence: str
+ :keyword instrumentation_key: The instrumentation key of the Application Insights resource.
+ :paramtype instrumentation_key: str
+ :keyword tags: A set of tags. Key/value collection of context properties. See ContextTagKeys
+ for information on available properties.
+ :paramtype tags: dict[str, str]
+ :keyword data: Telemetry data item.
+ :paramtype data: ~azure_monitor_client.models.MonitorBase
+ """
+ super(TelemetryItem, self).__init__(**kwargs)
+ self.version = version
+ self.name = name
+ self.time = time
+ self.sample_rate = sample_rate
+ self.sequence = sequence
+ self.instrumentation_key = instrumentation_key
+ self.tags = tags
+ self.data = data
+
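+# Illustrative sketch: a full envelope ties the event time, instrumentation key
+# and payload together (envelope name, key and message are hypothetical).
+#
+#   item = TelemetryItem(
+#       name="Microsoft.ApplicationInsights.Message",
+#       time=datetime.datetime.now(datetime.timezone.utc),
+#       instrumentation_key="00000000-0000-0000-0000-000000000000",
+#       data=MonitorBase(base_type="MessageData",
+#                        base_data=MessageData(message="hello")),
+#   )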
+
+class TrackResponse(msrest.serialization.Model):
+ """Response containing the status of each telemetry item.
+
+ :ivar items_received: The number of items received.
+ :vartype items_received: int
+ :ivar items_accepted: The number of items accepted.
+ :vartype items_accepted: int
+ :ivar errors: An array of error detail objects.
+ :vartype errors: list[~azure_monitor_client.models.TelemetryErrorDetails]
+ """
+
+ _attribute_map = {
+ "items_received": {"key": "itemsReceived", "type": "int"},
+ "items_accepted": {"key": "itemsAccepted", "type": "int"},
+ "errors": {"key": "errors", "type": "[TelemetryErrorDetails]"},
+ }
+
+ def __init__(
+ self,
+ *,
+ items_received: Optional[int] = None,
+ items_accepted: Optional[int] = None,
+ errors: Optional[List["TelemetryErrorDetails"]] = None,
+ **kwargs
+ ):
+ """
+ :keyword items_received: The number of items received.
+ :paramtype items_received: int
+ :keyword items_accepted: The number of items accepted.
+ :paramtype items_accepted: int
+ :keyword errors: An array of error detail objects.
+ :paramtype errors: list[~azure_monitor_client.models.TelemetryErrorDetails]
+ """
+ super(TrackResponse, self).__init__(**kwargs)
+ self.items_received = items_received
+ self.items_accepted = items_accepted
+ self.errors = errors
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/operations/__init__.py
new file mode 100644
index 00000000..a1b6ce78
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/operations/__init__.py
@@ -0,0 +1,13 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_monitor_client_operations import AzureMonitorClientOperationsMixin
+
+__all__ = [
+ "AzureMonitorClientOperationsMixin",
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/operations/_azure_monitor_client_operations.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/operations/_azure_monitor_client_operations.py
new file mode 100644
index 00000000..6bc3c37e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/operations/_azure_monitor_client_operations.py
@@ -0,0 +1,140 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+
+from .. import models as _models
+from .._vendor import _convert_request
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, List, Optional, TypeVar
+
+ T = TypeVar("T")
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_track_request(
+ **kwargs # type: Any
+):
+ # type: (...) -> HttpRequest
+ content_type = kwargs.pop('content_type', None) # type: Optional[str]
+
+ accept = "application/json"
+ # Construct URL
+ _url = kwargs.pop("template_url", "/track")
+
+ # Construct headers
+ _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
+ if content_type is not None:
+ _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+ _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+ return HttpRequest(
+ method="POST",
+ url=_url,
+ headers=_header_parameters,
+ **kwargs
+ )
+
+
+# fmt: on
+class AzureMonitorClientOperationsMixin(object):
+
+ def track(
+ self,
+ body, # type: List["_models.TelemetryItem"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.TrackResponse"
+ """Track telemetry events.
+
+ This operation sends a sequence of telemetry events that will be monitored by Azure Monitor.
+
+ :param body: The list of telemetry events to track.
+ :type body: list[~azure_monitor_client.models.TelemetryItem]
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TrackResponse, or the result of cls(response)
+ :rtype: ~azure_monitor_client.models.TrackResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop("cls", None) # type: ClsType["_models.TrackResponse"]
+ error_map = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 400: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ 402: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ 429: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ 500: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ 503: lambda response: HttpResponseError(
+ response=response, model=self._deserialize(_models.TrackResponse, response)
+ ),
+ }
+ error_map.update(kwargs.pop("error_map", {}))
+
+ content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
+
+ _json = self._serialize.body(body, "[TelemetryItem]")
+
+ request = build_track_request(
+ content_type=content_type,
+ json=_json,
+ template_url=self.track.metadata["url"],
+ )
+ request = _convert_request(request)
+ path_format_arguments = {
+ "Host": self._serialize.url("self._config.host", self._config.host, "str", skip_quote=True),
+ }
+ request.url = self._client.format_url(request.url, **path_format_arguments)
+
+ pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access
+ request, stream=False, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 206]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ if response.status_code in [200, 206]:
+ deserialized = self._deserialize("TrackResponse", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+
+ track.metadata = {"url": "/track"} # type: ignore
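+
+# Illustrative usage sketch (assumes a client that mixes in this class; the
+# variable names are hypothetical):
+#
+#   response = client.track([telemetry_item])
+#   if response.errors:
+#       for error in response.errors:
+#           print(error.index, error.status_code, error.message)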
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/py.typed b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/py.typed
new file mode 100644
index 00000000..e5aff4f8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_generated/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561. \ No newline at end of file
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/__init__.py
new file mode 100644
index 00000000..39d410a7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/__init__.py
@@ -0,0 +1,11 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License in the project root for
+# license information.
+# -------------------------------------------------------------------------
+
+from azure.monitor.opentelemetry.exporter._quickpulse._live_metrics import enable_live_metrics
+
+__all__ = [
+ "enable_live_metrics",
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_constants.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_constants.py
new file mode 100644
index 00000000..29c03e8b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_constants.py
@@ -0,0 +1,66 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import sys
+
+# cSpell:disable
+
+# (OpenTelemetry metric name, Quickpulse metric name)
+# Memory
+_COMMITTED_BYTES_NAME = ("azuremonitor.memorycommittedbytes", "\\Memory\\Committed Bytes")
+_PROCESS_PHYSICAL_BYTES_NAME = ("azuremonitor.processphysicalbytes", "\\Process\\Physical Bytes")
+# CPU
+_PROCESSOR_TIME_NAME = ("azuremonitor.processortotalprocessortime", "\\Processor(_Total)\\% Processor Time")
+_PROCESS_TIME_NORMALIZED_NAME = ("azuremonitor.processtimenormalized", "\\% Process\\Processor Time Normalized")
+# Request
+_REQUEST_RATE_NAME = ("azuremonitor.requestssec", "\\ApplicationInsights\\Requests/Sec")
+_REQUEST_FAILURE_RATE_NAME = ("azuremonitor.requestsfailedsec", "\\ApplicationInsights\\Requests Failed/Sec")
+_REQUEST_DURATION_NAME = ("azuremonitor.requestduration", "\\ApplicationInsights\\Request Duration")
+# Dependency
+_DEPENDENCY_RATE_NAME = ("azuremonitor.dependencycallssec", "\\ApplicationInsights\\Dependency Calls/Sec")
+_DEPENDENCY_FAILURE_RATE_NAME = (
+ "azuremonitor.dependencycallsfailedsec",
+ "\\ApplicationInsights\\Dependency Calls Failed/Sec",
+)
+_DEPENDENCY_DURATION_NAME = ("azuremonitor.dependencycallduration", "\\ApplicationInsights\\Dependency Call Duration")
+# Exception
+_EXCEPTION_RATE_NAME = ("azuremonitor.exceptionssec", "\\ApplicationInsights\\Exceptions/Sec")
+
+_QUICKPULSE_METRIC_NAME_MAPPINGS = dict(
+ [
+ _COMMITTED_BYTES_NAME,
+ _PROCESS_PHYSICAL_BYTES_NAME,
+ _PROCESSOR_TIME_NAME,
+ _PROCESS_TIME_NORMALIZED_NAME,
+ _REQUEST_RATE_NAME,
+ _REQUEST_FAILURE_RATE_NAME,
+ _REQUEST_DURATION_NAME,
+ _DEPENDENCY_RATE_NAME,
+ _DEPENDENCY_FAILURE_RATE_NAME,
+ _DEPENDENCY_DURATION_NAME,
+ _EXCEPTION_RATE_NAME,
+ ]
+)
+
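+# Illustrative lookup: the mapping translates an OpenTelemetry instrument name
+# into the performance-counter style name Quickpulse expects, e.g.
+#   _QUICKPULSE_METRIC_NAME_MAPPINGS["azuremonitor.requestssec"]
+#   # -> "\\ApplicationInsights\\Requests/Sec"
+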
+# Quickpulse intervals
+_SHORT_PING_INTERVAL_SECONDS = 5
+_POST_INTERVAL_SECONDS = 1
+_LONG_PING_INTERVAL_SECONDS = 60
+_POST_CANCEL_INTERVAL_SECONDS = 20
+
+# Response Headers
+
+_QUICKPULSE_ETAG_HEADER_NAME = "x-ms-qps-configuration-etag"
+_QUICKPULSE_POLLING_HEADER_NAME = "x-ms-qps-service-polling-interval-hint"
+_QUICKPULSE_REDIRECT_HEADER_NAME = "x-ms-qps-service-endpoint-redirect-v2"
+_QUICKPULSE_SUBSCRIBED_HEADER_NAME = "x-ms-qps-subscribed"
+
+# Projections (filtering)
+
+_QUICKPULSE_PROJECTION_COUNT = "Count()"
+_QUICKPULSE_PROJECTION_DURATION = "Duration"
+_QUICKPULSE_PROJECTION_CUSTOM = "CustomDimensions."
+
+_QUICKPULSE_PROJECTION_MAX_VALUE = sys.maxsize
+_QUICKPULSE_PROJECTION_MIN_VALUE = -sys.maxsize - 1
+
+# cSpell:enable
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_cpu.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_cpu.py
new file mode 100644
index 00000000..08dce9d0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_cpu.py
@@ -0,0 +1,63 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# cSpell:disable
+
+from datetime import datetime
+from typing import Iterable
+
+import psutil
+
+from opentelemetry.metrics import CallbackOptions, Observation
+
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+ _get_quickpulse_last_process_cpu,
+ _get_quickpulse_last_process_time,
+ _get_quickpulse_process_elapsed_time,
+ _set_quickpulse_last_process_cpu,
+ _set_quickpulse_last_process_time,
+ _set_quickpulse_process_elapsed_time,
+)
+
+PROCESS = psutil.Process()
+NUM_CPUS = psutil.cpu_count()
+
+
+# pylint: disable=unused-argument
+def _get_process_memory(options: CallbackOptions) -> Iterable[Observation]:
+ memory = 0
+ try:
+ # rss is non-swapped physical memory a process has used
+ memory = PROCESS.memory_info().rss
+ except (psutil.NoSuchProcess, psutil.AccessDenied):
+ pass
+ yield Observation(memory, {})
+
+
+# pylint: disable=unused-argument
+def _get_process_time_normalized_old(options: CallbackOptions) -> Iterable[Observation]:
+ normalized_cpu_percentage = 0.0
+ try:
+ cpu_times = PROCESS.cpu_times()
+ # total process time is user + system in s
+ total_time_s = cpu_times.user + cpu_times.system
+ process_time_s = total_time_s - _get_quickpulse_last_process_time()
+ # Store the cumulative total, not the interval delta, so the next
+ # callback's subtraction yields that interval's CPU time.
+ _set_quickpulse_last_process_time(total_time_s)
+ # Find elapsed time in s since last collection
+ current_time = datetime.now()
+ elapsed_time_s = (current_time - _get_quickpulse_process_elapsed_time()).total_seconds()
+ _set_quickpulse_process_elapsed_time(current_time)
+ # Obtain cpu % by dividing by elapsed time
+ cpu_percentage = process_time_s / elapsed_time_s
+ # Normalize by dividing by amount of logical cpus
+ normalized_cpu_percentage = cpu_percentage / NUM_CPUS
+ _set_quickpulse_last_process_cpu(normalized_cpu_percentage)
+ except (psutil.NoSuchProcess, psutil.AccessDenied, ZeroDivisionError):
+ pass
+ yield Observation(normalized_cpu_percentage, {})
+
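+# Worked example for the normalization above (hypothetical numbers): if
+# user+system CPU time advanced by 0.5 s over a 5 s collection window on an
+# 8-logical-CPU host, the observed value is 0.5 / 5 / 8 = 0.0125, i.e. 1.25%
+# of total capacity.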
+
+# pylint: disable=unused-argument
+def _get_process_time_normalized(options: CallbackOptions) -> Iterable[Observation]:
+ yield Observation(_get_quickpulse_last_process_cpu(), {})
+
+# cSpell:enable
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_exporter.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_exporter.py
new file mode 100644
index 00000000..f61cebe5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_exporter.py
@@ -0,0 +1,351 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import logging
+from typing import Any, Optional
+import weakref
+
+from opentelemetry.context import (
+ _SUPPRESS_INSTRUMENTATION_KEY,
+ attach,
+ detach,
+ set_value,
+)
+from opentelemetry.sdk.metrics import (
+ Counter,
+ Histogram,
+)
+from opentelemetry.sdk.metrics._internal.point import MetricsData
+from opentelemetry.sdk.metrics.export import (
+ AggregationTemporality,
+ MetricExporter,
+ MetricExportResult,
+ MetricsData as OTMetricsData,
+ MetricReader,
+)
+
+from azure.core.pipeline.policies import ContentDecodePolicy
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+ _LONG_PING_INTERVAL_SECONDS,
+ _POST_CANCEL_INTERVAL_SECONDS,
+ _POST_INTERVAL_SECONDS,
+ _QUICKPULSE_ETAG_HEADER_NAME,
+ _QUICKPULSE_SUBSCRIBED_HEADER_NAME,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated._configuration import QuickpulseClientConfiguration
+from azure.monitor.opentelemetry.exporter._quickpulse._generated._client import QuickpulseClient
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import MonitoringDataPoint
+from azure.monitor.opentelemetry.exporter._quickpulse._filter import _update_filter_configuration
+from azure.monitor.opentelemetry.exporter._quickpulse._policy import _QuickpulseRedirectPolicy
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+ _get_and_clear_quickpulse_documents,
+ _get_global_quickpulse_state,
+ _get_quickpulse_etag,
+ _is_ping_state,
+ _set_global_quickpulse_state,
+ _set_quickpulse_etag,
+ _QuickpulseState,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._utils import (
+ _metric_to_quick_pulse_data_points,
+)
+from azure.monitor.opentelemetry.exporter._connection_string_parser import ConnectionStringParser
+from azure.monitor.opentelemetry.exporter._utils import (
+ _get_auth_policy,
+ _ticks_since_dot_net_epoch,
+ PeriodicTask,
+)
+
+
+_logger = logging.getLogger(__name__)
+
+
+_QUICKPULSE_METRIC_TEMPORALITIES = {
+ # Use DELTA temporalities because we want to reset the counts every collection interval
+ Counter: AggregationTemporality.DELTA,
+ Histogram: AggregationTemporality.DELTA,
+}
+
+
+class _Response:
+ """Response that encapsulates pipeline response and response headers from
+ QuickPulse client.
+ """
+
+ def __init__(self, pipeline_response, deserialized, response_headers):
+ self._pipeline_response = pipeline_response
+ self._deserialized = deserialized
+ self._response_headers = response_headers
+
+
+class _UnsuccessfulQuickPulsePostError(Exception):
+ """Exception raised to indicate unsuccessful QuickPulse post for backoff logic."""
+
+
+class _QuickpulseExporter(MetricExporter):
+
+ def __init__(self, **kwargs: Any) -> None:
+ """Metric exporter for Quickpulse.
+
+ :param str connection_string: The connection string used for your Application Insights resource.
+ :keyword TokenCredential credential: Token credential, such as ManagedIdentityCredential or
+ ClientSecretCredential, used for Azure Active Directory (AAD) authentication. Defaults to None.
+ :rtype: None
+ """
+ parsed_connection_string = ConnectionStringParser(kwargs.get("connection_string"))
+
+ self._live_endpoint = parsed_connection_string.live_endpoint
+ self._instrumentation_key = parsed_connection_string.instrumentation_key
+ self._credential = kwargs.get("credential")
+ config = QuickpulseClientConfiguration(credential=self._credential) # type: ignore
+ qp_redirect_policy = _QuickpulseRedirectPolicy(permit_redirects=False)
+ policies = [
+ # Custom redirect policy for QP
+ qp_redirect_policy,
+ # Needed for serialization
+ ContentDecodePolicy(),
+ # Logging for client calls
+ config.http_logging_policy,
+ _get_auth_policy(self._credential, config.authentication_policy),
+ config.authentication_policy,
+ # Explicitly disabling to avoid tracing live metrics calls
+ # DistributedTracingPolicy(),
+ ]
+ self._client = QuickpulseClient(
+ credential=self._credential, endpoint=self._live_endpoint, policies=policies # type: ignore
+ )
+        # Keep a weak reference to the client on the redirect policy so the
+        # endpoint can be modified dynamically if a redirect occurs
+ qp_redirect_policy._qp_client_ref = weakref.ref(self._client)
+
+ MetricExporter.__init__(
+ self,
+ preferred_temporality=_QUICKPULSE_METRIC_TEMPORALITIES, # type: ignore
+ )
+
+ def export(
+ self,
+ metrics_data: OTMetricsData,
+ timeout_millis: float = 10_000,
+ **kwargs: Any,
+ ) -> MetricExportResult:
+        """Exports a batch of metric data.
+
+ :param metrics_data: OpenTelemetry Metric(s) to export.
+ :type metrics_data: ~opentelemetry.sdk.metrics._internal.point.MetricsData
+ :param timeout_millis: The maximum amount of time to wait for each export. Not currently used.
+ :type timeout_millis: float
+ :return: The result of the export.
+ :rtype: ~opentelemetry.sdk.metrics.export.MetricExportResult
+ """
+ result = MetricExportResult.SUCCESS
+ base_monitoring_data_point = kwargs.get("base_monitoring_data_point")
+ if base_monitoring_data_point is None:
+ return MetricExportResult.FAILURE
+ data_points = _metric_to_quick_pulse_data_points(
+ metrics_data,
+ base_monitoring_data_point=base_monitoring_data_point,
+ documents=_get_and_clear_quickpulse_documents(),
+ )
+ configuration_etag = _get_quickpulse_etag() or ""
+ token = attach(set_value(_SUPPRESS_INSTRUMENTATION_KEY, True))
+ # pylint: disable=R1702
+ try:
+ post_response = self._client.publish( # type: ignore
+ endpoint=self._live_endpoint,
+ monitoring_data_points=data_points,
+ ikey=self._instrumentation_key, # type: ignore
+ configuration_etag=configuration_etag,
+ transmission_time=_ticks_since_dot_net_epoch(),
+ cls=_Response,
+ )
+ if not post_response:
+ # If no response, assume unsuccessful
+ result = MetricExportResult.FAILURE
+ else:
+ header = post_response._response_headers.get( # pylint: disable=protected-access
+ _QUICKPULSE_SUBSCRIBED_HEADER_NAME
+ )
+ if header != "true":
+                    # A user leaving the live metrics page is treated as an unsuccessful post
+ result = MetricExportResult.FAILURE
+ else:
+ # Check if etag has changed
+ etag = post_response._response_headers.get( # pylint: disable=protected-access
+ _QUICKPULSE_ETAG_HEADER_NAME
+ )
+ if etag and etag != configuration_etag:
+ config = (
+ post_response._pipeline_response.http_response.content # pylint: disable=protected-access
+ )
+ # Content will only be populated if configuration has changed (etag is different)
+ if config:
+ # Update and apply configuration changes
+ try:
+ _update_filter_configuration(etag, config)
+ except Exception: # pylint: disable=broad-except
+ _logger.exception("Exception occurred while updating filter config.")
+ result = MetricExportResult.FAILURE
+ except Exception: # pylint: disable=broad-except
+ _logger.exception("Exception occurred while publishing live metrics.")
+ result = MetricExportResult.FAILURE
+ finally:
+ detach(token)
+ return result
+
+ def force_flush(
+ self,
+ timeout_millis: float = 10_000,
+ ) -> bool:
+ """
+        Ensure that the export of any metrics currently received by the exporter
+        is completed as soon as possible. Called when the SDK is flushed.
+
+ :param timeout_millis: The maximum amount of time to wait for shutdown. Not currently used.
+ :type timeout_millis: float
+ :return: The result of the export.
+ :rtype: bool
+ """
+ return True
+
+ def shutdown(
+ self,
+ timeout_millis: float = 30_000,
+ **kwargs: Any,
+ ) -> None:
+ """Shuts down the exporter.
+
+ Called when the SDK is shut down.
+
+ :param timeout_millis: The maximum amount of time to wait for shutdown. Not currently used.
+ :type timeout_millis: float
+ """
+
+    def _ping(self, monitoring_data_point: MonitoringDataPoint) -> Optional[_Response]:
+        ping_response = None
+        token = attach(set_value(_SUPPRESS_INSTRUMENTATION_KEY, True))
+        etag = _get_quickpulse_etag() or ""
+        try:
+            ping_response = self._client.is_subscribed(  # type: ignore
+                endpoint=self._live_endpoint,
+                monitoring_data_point=monitoring_data_point,
+                ikey=self._instrumentation_key,  # type: ignore
+                transmission_time=_ticks_since_dot_net_epoch(),
+                machine_name=monitoring_data_point.machine_name,
+                instance_name=monitoring_data_point.instance,
+                stream_id=monitoring_data_point.stream_id,
+                role_name=monitoring_data_point.role_name,
+                invariant_version=monitoring_data_point.invariant_version,  # type: ignore
+                configuration_etag=etag,
+                cls=_Response,
+            )
+        except Exception:  # pylint: disable=broad-except
+            _logger.exception("Exception occurred while pinging live metrics.")
+        finally:
+            # Always detach so instrumentation suppression does not leak past this call
+            detach(token)
+        return ping_response
+
+
+class _QuickpulseMetricReader(MetricReader):
+
+ def __init__(
+ self,
+ exporter: _QuickpulseExporter,
+ base_monitoring_data_point: MonitoringDataPoint,
+ ) -> None:
+ self._exporter = exporter
+ self._base_monitoring_data_point = base_monitoring_data_point
+ self._elapsed_num_seconds = 0
+ self._worker = PeriodicTask(
+ interval=_POST_INTERVAL_SECONDS,
+ function=self._ticker,
+ name="QuickpulseMetricReader",
+ )
+ self._worker.daemon = True
+ super().__init__(
+ preferred_temporality=self._exporter._preferred_temporality,
+ preferred_aggregation=self._exporter._preferred_aggregation,
+ )
+ self._worker.start()
+
+ # pylint: disable=protected-access
+ # pylint: disable=too-many-nested-blocks
+ def _ticker(self) -> None:
+ if _is_ping_state():
+            # Send a ping if the elapsed number of seconds meets the ping interval threshold
+ if self._elapsed_num_seconds % _get_global_quickpulse_state().value == 0:
+ ping_response = self._exporter._ping(
+ self._base_monitoring_data_point,
+ )
+ if ping_response:
+ try:
+ subscribed = ping_response._response_headers.get(_QUICKPULSE_SUBSCRIBED_HEADER_NAME)
+ if subscribed and subscribed == "true":
+ # Switch state to post if subscribed
+ _set_global_quickpulse_state(_QuickpulseState.POST_SHORT)
+ self._elapsed_num_seconds = 0
+ # Update config etag
+ etag = ping_response._response_headers.get(_QUICKPULSE_ETAG_HEADER_NAME)
+ if etag is None:
+ etag = ""
+ if _get_quickpulse_etag() != etag:
+ _set_quickpulse_etag(etag)
+ # TODO: Set default document filter config from response body
+ # config = ping_response._pipeline_response.http_response.content
+ else:
+ # Backoff after _LONG_PING_INTERVAL_SECONDS (60s) of no successful requests
+ if (
+ _get_global_quickpulse_state() is _QuickpulseState.PING_SHORT
+ and self._elapsed_num_seconds >= _LONG_PING_INTERVAL_SECONDS
+ ):
+ _set_global_quickpulse_state(_QuickpulseState.PING_LONG)
+ # Reset etag to default if not subscribed
+ _set_quickpulse_etag("")
+ except Exception: # pylint: disable=broad-except
+ _logger.exception("Exception occurred while reading live metrics ping response.")
+ _set_quickpulse_etag("")
+ # TODO: Implement redirect
+ else:
+ # Erroneous ping responses instigate backoff logic
+ # Backoff after _LONG_PING_INTERVAL_SECONDS (60s) of no successful requests
+ if (
+ _get_global_quickpulse_state() is _QuickpulseState.PING_SHORT
+ and self._elapsed_num_seconds >= _LONG_PING_INTERVAL_SECONDS
+ ):
+ _set_global_quickpulse_state(_QuickpulseState.PING_LONG)
+ # Reset etag to default if error
+ _set_quickpulse_etag("")
+ else:
+ try:
+ self.collect()
+ except _UnsuccessfulQuickPulsePostError:
+ # Unsuccessful posts instigate backoff logic
+ # Backoff after _POST_CANCEL_INTERVAL_SECONDS (20s) of no successful requests
+ # And resume pinging
+ if self._elapsed_num_seconds >= _POST_CANCEL_INTERVAL_SECONDS:
+ _set_global_quickpulse_state(_QuickpulseState.PING_SHORT)
+ # Reset etag to default
+ _set_quickpulse_etag("")
+ self._elapsed_num_seconds = 0
+
+ self._elapsed_num_seconds += 1
+
+ def _receive_metrics(
+ self,
+ metrics_data: MetricsData,
+ timeout_millis: float = 10_000,
+ **kwargs,
+ ) -> None:
+ result = self._exporter.export(
+ metrics_data,
+ timeout_millis=timeout_millis,
+ base_monitoring_data_point=self._base_monitoring_data_point,
+ )
+ if result is MetricExportResult.FAILURE:
+            # There is currently no way to propagate an unsuccessful metric post, so
+            # we raise an _UnsuccessfulQuickPulsePostError exception. Callers MUST
+            # handle this exception whenever `collect()` is called
+ raise _UnsuccessfulQuickPulsePostError()
+
+ def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
+ self._worker.cancel()
+ self._worker.join()
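A hedged sketch of how the exporter and reader above are wired into an OpenTelemetry MeterProvider; in the shipped package this wiring is done internally by the live-metrics setup code rather than by user code, and every identifier value below is a placeholder:

    from opentelemetry.sdk.metrics import MeterProvider

    from azure.monitor.opentelemetry.exporter._quickpulse._exporter import (
        _QuickpulseExporter,
        _QuickpulseMetricReader,
    )
    from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
        MonitoringDataPoint,
    )

    exporter = _QuickpulseExporter(
        connection_string="InstrumentationKey=00000000-0000-0000-0000-000000000000",
    )
    # Placeholder identity fields; real values come from the SDK's resource detection
    base_point = MonitoringDataPoint(
        role_name="my-service",
        machine_name="my-host",
        instance="my-host",
        stream_id="00000000-0000-0000-0000-000000000000",
    )
    # The reader pings until subscribed, then collects and posts every _POST_INTERVAL_SECONDS
    reader = _QuickpulseMetricReader(exporter, base_point)
    provider = MeterProvider(metric_readers=[reader])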
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_filter.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_filter.py
new file mode 100644
index 00000000..59e85eda
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_filter.py
@@ -0,0 +1,194 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import json
+
+from dataclasses import fields
+from typing import Any, Dict, List
+
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+ DerivedMetricInfo,
+ DocumentStreamInfo,
+ FilterConjunctionGroupInfo,
+ FilterInfo,
+ PredicateType,
+ TelemetryType,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._projection import (
+ _init_derived_metric_projection,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+ _clear_quickpulse_projection_map,
+ _set_quickpulse_derived_metric_infos,
+ _set_quickpulse_doc_stream_infos,
+ _set_quickpulse_etag,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+ _DATA_FIELD_NAMES,
+ _TelemetryData,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._utils import _filter_time_stamp_to_ms
+from azure.monitor.opentelemetry.exporter._quickpulse._validate import (
+ _validate_derived_metric_info,
+ _validate_document_filter_group_info,
+)
+
+
+# Apply filter configuration based on the configuration response body
+# Called on the post response from the exporter
+def _update_filter_configuration(etag: str, config_bytes: bytes):
+ # Clear projection map
+ _clear_quickpulse_projection_map()
+    # config is a byte string that decodes to JSON
+    config = json.loads(config_bytes.decode("utf-8"))
+    # Process metric filter configuration
+    _parse_metric_filter_configuration(config)
+    # Process document filter configuration
+    _parse_document_filter_configuration(config)
+ # Update new etag
+ _set_quickpulse_etag(etag)
+
+
+def _parse_metric_filter_configuration(config: Dict[str, Any]) -> None:
+ seen_ids = set()
+ # Process metric filter configuration
+ metric_infos: Dict[TelemetryType, List[DerivedMetricInfo]] = {}
+ for metric_info_dict in config.get("Metrics", []):
+ metric_info = DerivedMetricInfo.from_dict(metric_info_dict)
+ # Skip duplicate ids
+ if metric_info.id in seen_ids:
+ continue
+ if not _validate_derived_metric_info(metric_info):
+ continue
+ # Rename exception fields by parsing out "Exception." portion
+ for filter_group in metric_info.filter_groups:
+ _rename_exception_fields_for_filtering(filter_group)
+ telemetry_type: TelemetryType = TelemetryType(metric_info.telemetry_type)
+ metric_info_list = metric_infos.get(telemetry_type, [])
+ metric_info_list.append(metric_info)
+ metric_infos[telemetry_type] = metric_info_list
+ seen_ids.add(metric_info.id)
+ # Initialize projections from this derived metric info
+ _init_derived_metric_projection(metric_info)
+ _set_quickpulse_derived_metric_infos(metric_infos)
+
+
+def _parse_document_filter_configuration(config: Dict[str, Any]) -> None:
+ # Process document filter configuration
+ doc_infos: Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]] = {}
+ for doc_stream_dict in config.get("DocumentStreams", []):
+ doc_stream = DocumentStreamInfo.from_dict(doc_stream_dict)
+ for doc_filter_group in doc_stream.document_filter_groups:
+ if not _validate_document_filter_group_info(doc_filter_group):
+ continue
+ # Rename exception fields by parsing out "Exception." portion
+ _rename_exception_fields_for_filtering(doc_filter_group.filters)
+ telemetry_type: TelemetryType = TelemetryType(doc_filter_group.telemetry_type)
+ if telemetry_type not in doc_infos:
+ doc_infos[telemetry_type] = {}
+ if doc_stream.id not in doc_infos[telemetry_type]:
+ doc_infos[telemetry_type][doc_stream.id] = []
+ doc_infos[telemetry_type][doc_stream.id].append(doc_filter_group.filters)
+ _set_quickpulse_doc_stream_infos(doc_infos)
+
+
+def _rename_exception_fields_for_filtering(filter_groups: FilterConjunctionGroupInfo):
+ for filter in filter_groups.filters:
+ if filter.field_name.startswith("Exception."):
+ filter.field_name = filter.field_name.replace("Exception.", "")
+
+
+def _check_metric_filters(metric_infos: List[DerivedMetricInfo], data: _TelemetryData) -> bool:
+ match = False
+ for metric_info in metric_infos:
+ # Should only be a single `FilterConjunctionGroupInfo` in `filter_groups`
+ # but we use a logical OR to match if there is more than one
+ for group in metric_info.filter_groups:
+ match = match or _check_filters(group.filters, data)
+ return match
+
+
+def _check_filters(filters: List[FilterInfo], data: _TelemetryData) -> bool:
+    if not filters:
+        return True
+    # All of the filters need to match for this to return True (an AND operation)
+    for filter in filters:
+        if not _check_filter(filter, data):
+            return False
+    return True
+
+
+# pylint: disable=R0911
+def _check_filter(filter: FilterInfo, data: _TelemetryData) -> bool:
+    name = filter.field_name
+    predicate = filter.predicate
+    comparand = filter.comparand
+    if name == "*":
+        return _check_any_field_filter(filter, data)
+    if name.startswith("CustomDimensions."):
+        return _check_custom_dim_field_filter(filter, data.custom_dimensions)
+    field_names = _DATA_FIELD_NAMES.get(type(data), {})
+    field_name = field_names.get(name.lower(), "")
+    val = getattr(data, field_name, "")
+    if name == "Success":
+        if predicate == PredicateType.EQUAL:
+            return str(val).lower() == comparand.lower()
+        if predicate == PredicateType.NOT_EQUAL:
+            return str(val).lower() != comparand.lower()
+        return False
+    if name in ("ResultCode", "ResponseCode", "Duration"):
+        try:
+            val = int(val)
+        except Exception:  # pylint: disable=broad-exception-caught
+            return False
+        numerical_val = _filter_time_stamp_to_ms(comparand) if name == "Duration" else int(comparand)
+        if numerical_val is None:
+            return False
+        if predicate == PredicateType.EQUAL:
+            return val == numerical_val
+        if predicate == PredicateType.NOT_EQUAL:
+            return val != numerical_val
+        if predicate == PredicateType.GREATER_THAN:
+            return val > numerical_val
+        if predicate == PredicateType.GREATER_THAN_OR_EQUAL:
+            return val >= numerical_val
+        if predicate == PredicateType.LESS_THAN:
+            return val < numerical_val
+        if predicate == PredicateType.LESS_THAN_OR_EQUAL:
+            return val <= numerical_val
+        return False
+    # string fields
+    return _field_string_compare(str(val), comparand, predicate)
+
+
+def _check_any_field_filter(filter: FilterInfo, data: _TelemetryData) -> bool:
+ # At this point, the only predicates possible to pass in are Contains and DoesNotContain
+ # At config validation time the predicate is checked to be one of these two.
+ for field in fields(data):
+ if field.name == "custom_dimensions":
+ for val in data.custom_dimensions.values():
+ if _field_string_compare(str(val), filter.comparand, filter.predicate):
+ return True
+ else:
+ val = getattr(data, field.name, None) # type: ignore
+ if val is not None:
+ if _field_string_compare(str(val), filter.comparand, filter.predicate):
+ return True
+ return False
+
+
+def _check_custom_dim_field_filter(filter: FilterInfo, custom_dimensions: Dict[str, str]) -> bool:
+ field = filter.field_name.replace("CustomDimensions.", "")
+ value = custom_dimensions.get(field)
+ if value is not None:
+ return _field_string_compare(str(value), filter.comparand, filter.predicate)
+ return False
+
+
+def _field_string_compare(value: str, comparand: str, predicate: str) -> bool:
+ if predicate == PredicateType.EQUAL:
+ return value == comparand
+ if predicate == PredicateType.NOT_EQUAL:
+ return value != comparand
+ if predicate == PredicateType.CONTAINS:
+ return comparand.lower() in value.lower()
+ if predicate == PredicateType.DOES_NOT_CONTAIN:
+ return comparand.lower() not in value.lower()
+ return False
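The string predicates above are worth a small illustration: equality is case-sensitive, while Contains/DoesNotContain lower-case both sides before comparing. A sketch using the helpers from this module:

    from azure.monitor.opentelemetry.exporter._quickpulse._filter import (
        _field_string_compare,
    )
    from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
        PredicateType,
    )

    # Contains/DoesNotContain are case-insensitive
    assert _field_string_compare("GET /home", "get", PredicateType.CONTAINS)
    assert _field_string_compare("GET /home", "POST", PredicateType.DOES_NOT_CONTAIN)
    # Equal/NotEqual compare verbatim, so case matters
    assert not _field_string_compare("ok", "OK", PredicateType.EQUAL)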
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/__init__.py
new file mode 100644
index 00000000..664b539c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/__init__.py
@@ -0,0 +1,23 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._client import QuickpulseClient
+
+try:
+ from ._patch import __all__ as _patch_all
+ from ._patch import * # pylint: disable=unused-wildcard-import
+except ImportError:
+ _patch_all = []
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "QuickpulseClient",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_client.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_client.py
new file mode 100644
index 00000000..d477f7d4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_client.py
@@ -0,0 +1,93 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, TYPE_CHECKING
+
+from azure.core import PipelineClient
+from azure.core.pipeline import policies
+from azure.core.rest import HttpRequest, HttpResponse
+
+from . import models as _models
+from ._configuration import QuickpulseClientConfiguration
+from ._operations import QuickpulseClientOperationsMixin
+from ._serialization import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials import TokenCredential
+
+
+class QuickpulseClient(QuickpulseClientOperationsMixin): # pylint: disable=client-accepts-api-version-keyword
+ """Quickpulse Client.
+
+ :param credential: Credential needed for the client to connect to Azure. Required.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :keyword api_version: Api Version. Default value is "2024-04-01-preview". Note that overriding
+ this default value may result in unsupported behavior.
+ :paramtype api_version: str
+ """
+
+ def __init__(self, credential: "TokenCredential", **kwargs: Any) -> None:
+ _endpoint = "{endpoint}"
+ self._config = QuickpulseClientConfiguration(credential=credential, **kwargs)
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs)
+
+ client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+ self._serialize.client_side_validation = False
+
+ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
+ """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        >>> response = client.send_request(request)
+        >>> response
+        <HttpResponse: 200 OK>
+
+ For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
+
+ :param request: The network request you want to make. Required.
+ :type request: ~azure.core.rest.HttpRequest
+ :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+ :return: The response of your network call. Does not do error handling on your response.
+ :rtype: ~azure.core.rest.HttpResponse
+ """
+
+ request_copy = deepcopy(request)
+ request_copy.url = self._client.format_url(request_copy.url)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
+
+ def close(self) -> None:
+ self._client.close()
+
+ def __enter__(self) -> "QuickpulseClient":
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *exc_details: Any) -> None:
+ self._client.__exit__(*exc_details)
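A sketch of constructing the client with an explicit policies list, which is how _QuickpulseExporter.__init__ above bypasses the default pipeline; credential=None is tolerated here only because this vendored configuration comments out the credential check:

    from azure.core.pipeline.policies import ContentDecodePolicy, HeadersPolicy

    client = QuickpulseClient(
        credential=None,  # type: ignore  # normally a TokenCredential
        policies=[HeadersPolicy(), ContentDecodePolicy()],
    )
    with client:
        pass  # pipeline opens and closes cleanly; requests run through the two policies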
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_configuration.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_configuration.py
new file mode 100644
index 00000000..bad8b6a9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_configuration.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.pipeline import policies
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials import TokenCredential
+
+VERSION = "unknown"
+
+
+class QuickpulseClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+ """Configuration for QuickpulseClient.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure. Required.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :keyword api_version: Api Version. Default value is "2024-04-01-preview". Note that overriding
+ this default value may result in unsupported behavior.
+ :paramtype api_version: str
+ """
+
+ def __init__(self, credential: "TokenCredential", **kwargs: Any) -> None:
+ api_version: str = kwargs.pop("api_version", "2024-04-01-preview")
+
+ # if credential is None:
+ # raise ValueError("Parameter 'credential' must not be None.")
+
+ self.credential = credential
+ self.api_version = api_version
+ self.credential_scopes = kwargs.pop("credential_scopes", ["https://monitor.azure.com/.default"])
+ kwargs.setdefault("sdk_moniker", "quickpulseclient/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
+ self._configure(**kwargs)
+
+ def _configure(self, **kwargs: Any) -> None:
+ self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
+ self.authentication_policy = kwargs.get("authentication_policy")
+ # if self.credential and not self.authentication_policy:
+ # self.authentication_policy = policies.BearerTokenCredentialPolicy(
+ # self.credential, *self.credential_scopes, **kwargs
+ # )
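Each policy attribute set in _configure can be overridden through the constructor kwargs; anything not supplied falls back to the azure-core default built above. A small sketch:

    from azure.core.pipeline import policies

    config = QuickpulseClientConfiguration(
        credential=None,  # type: ignore  # the check is commented out in this vendored copy
        retry_policy=policies.RetryPolicy(retry_total=0),  # overrides the default retries
    )
    assert config.api_version == "2024-04-01-preview"
    assert config.credential_scopes == ["https://monitor.azure.com/.default"]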
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/__init__.py
new file mode 100644
index 00000000..3d1697f9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import QuickpulseClientOperationsMixin
+
+from ._patch import __all__ as _patch_all
+from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "QuickpulseClientOperationsMixin",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_operations.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_operations.py
new file mode 100644
index 00000000..270eb34f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_operations.py
@@ -0,0 +1,544 @@
+# pylint: disable=too-many-lines,too-many-statements
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload
+
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+
+from .. import models as _models
+from .._serialization import Serializer
+from .._vendor import QuickpulseClientMixinABC
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_quickpulse_is_subscribed_request(
+ *,
+ ikey: str,
+ transmission_time: Optional[int] = None,
+ machine_name: Optional[str] = None,
+ instance_name: Optional[str] = None,
+ stream_id: Optional[str] = None,
+ role_name: Optional[str] = None,
+ invariant_version: Optional[str] = None,
+ configuration_etag: Optional[str] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-04-01-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = "/QuickPulseService.svc/ping"
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+ _params["ikey"] = _SERIALIZER.query("ikey", ikey, "str")
+
+ # Construct headers
+ if transmission_time is not None:
+ _headers["x-ms-qps-transmission-time"] = _SERIALIZER.header("transmission_time", transmission_time, "int")
+ if machine_name is not None:
+ _headers["x-ms-qps-machine-name"] = _SERIALIZER.header("machine_name", machine_name, "str")
+ if instance_name is not None:
+ _headers["x-ms-qps-instance-name"] = _SERIALIZER.header("instance_name", instance_name, "str")
+ if stream_id is not None:
+ _headers["x-ms-qps-stream-id"] = _SERIALIZER.header("stream_id", stream_id, "str")
+ if role_name is not None:
+ _headers["x-ms-qps-role-name"] = _SERIALIZER.header("role_name", role_name, "str")
+ if invariant_version is not None:
+ _headers["x-ms-qps-invariant-version"] = _SERIALIZER.header("invariant_version", invariant_version, "str")
+ if configuration_etag is not None:
+ _headers["x-ms-qps-configuration-etag"] = _SERIALIZER.header("configuration_etag", configuration_etag, "str")
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_quickpulse_publish_request(
+ *, ikey: str, configuration_etag: Optional[str] = None, transmission_time: Optional[int] = None, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-04-01-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = "/QuickPulseService.svc/post"
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+ _params["ikey"] = _SERIALIZER.query("ikey", ikey, "str")
+
+ # Construct headers
+ if configuration_etag is not None:
+ _headers["x-ms-qps-configuration-etag"] = _SERIALIZER.header("configuration_etag", configuration_etag, "str")
+ if transmission_time is not None:
+ _headers["x-ms-qps-transmission-time"] = _SERIALIZER.header("transmission_time", transmission_time, "int")
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class QuickpulseClientOperationsMixin(QuickpulseClientMixinABC):
+ @overload
+ def is_subscribed(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_point: Optional[_models.MonitoringDataPoint] = None,
+ *,
+ ikey: str,
+ transmission_time: Optional[int] = None,
+ machine_name: Optional[str] = None,
+ instance_name: Optional[str] = None,
+ stream_id: Optional[str] = None,
+ role_name: Optional[str] = None,
+ invariant_version: Optional[str] = None,
+ configuration_etag: Optional[str] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Determine whether there is any subscription to the metrics and documents.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+ Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+ name and invariant version. Default value is None.
+ :type monitoring_data_point: ~quickpulse_client.models.MonitoringDataPoint
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+        Live Metrics. An 8-byte long integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+ machine name with instance name as a backup. Default value is None.
+ :paramtype machine_name: str
+ :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+ Metrics uses machine name with instance name as a backup. Default value is None.
+ :paramtype instance_name: str
+ :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+ and documents. Default value is None.
+ :paramtype stream_id: str
+ :keyword role_name: Cloud role name of the service. Default value is None.
+ :paramtype role_name: str
+ :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+ between the client and Live Metrics. Default value is None.
+ :paramtype invariant_version: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def is_subscribed(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_point: Optional[IO[bytes]] = None,
+ *,
+ ikey: str,
+ transmission_time: Optional[int] = None,
+ machine_name: Optional[str] = None,
+ instance_name: Optional[str] = None,
+ stream_id: Optional[str] = None,
+ role_name: Optional[str] = None,
+ invariant_version: Optional[str] = None,
+ configuration_etag: Optional[str] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Determine whether there is any subscription to the metrics and documents.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+ Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+ name and invariant version. Default value is None.
+ :type monitoring_data_point: IO[bytes]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+        Live Metrics. An 8-byte long integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+ machine name with instance name as a backup. Default value is None.
+ :paramtype machine_name: str
+ :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+ Metrics uses machine name with instance name as a backup. Default value is None.
+ :paramtype instance_name: str
+ :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+ and documents. Default value is None.
+ :paramtype stream_id: str
+ :keyword role_name: Cloud role name of the service. Default value is None.
+ :paramtype role_name: str
+ :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+ between the client and Live Metrics. Default value is None.
+ :paramtype invariant_version: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ # @distributed_trace
+ def is_subscribed(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_point: Optional[Union[_models.MonitoringDataPoint, IO[bytes]]] = None,
+ *,
+ ikey: str,
+ transmission_time: Optional[int] = None,
+ machine_name: Optional[str] = None,
+ instance_name: Optional[str] = None,
+ stream_id: Optional[str] = None,
+ role_name: Optional[str] = None,
+ invariant_version: Optional[str] = None,
+ configuration_etag: Optional[str] = None,
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Determine whether there is any subscription to the metrics and documents.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+ Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+        name and invariant version. Is either a MonitoringDataPoint type or an IO[bytes]
+        type. Default value is None.
+ :type monitoring_data_point: ~quickpulse_client.models.MonitoringDataPoint or IO[bytes]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+        Live Metrics. An 8-byte long integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+ machine name with instance name as a backup. Default value is None.
+ :paramtype machine_name: str
+ :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+ Metrics uses machine name with instance name as a backup. Default value is None.
+ :paramtype instance_name: str
+ :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+ and documents. Default value is None.
+ :paramtype stream_id: str
+ :keyword role_name: Cloud role name of the service. Default value is None.
+ :paramtype role_name: str
+ :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+ between the client and Live Metrics. Default value is None.
+ :paramtype invariant_version: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.CollectionConfigurationInfo] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(monitoring_data_point, (IOBase, bytes)):
+ _content = monitoring_data_point
+ else:
+ if monitoring_data_point is not None:
+ _json = self._serialize.body(monitoring_data_point, "MonitoringDataPoint")
+ else:
+ _json = None
+
+ _request = build_quickpulse_is_subscribed_request(
+ ikey=ikey,
+ transmission_time=transmission_time,
+ machine_name=machine_name,
+ instance_name=instance_name,
+ stream_id=stream_id,
+ role_name=role_name,
+ invariant_version=invariant_version,
+ configuration_etag=configuration_etag,
+ content_type=content_type,
+ api_version=self._config.api_version,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("endpoint", endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ response.read() # Load the body in memory and close the socket
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ServiceError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-qps-configuration-etag"] = self._deserialize(
+ "str", response.headers.get("x-ms-qps-configuration-etag")
+ )
+ response_headers["x-ms-qps-service-endpoint-redirect-v2"] = self._deserialize(
+ "str", response.headers.get("x-ms-qps-service-endpoint-redirect-v2")
+ )
+ response_headers["x-ms-qps-service-polling-interval-hint"] = self._deserialize(
+ "str", response.headers.get("x-ms-qps-service-polling-interval-hint")
+ )
+ response_headers["x-ms-qps-subscribed"] = self._deserialize("str", response.headers.get("x-ms-qps-subscribed"))
+
+ deserialized = self._deserialize("CollectionConfigurationInfo", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def publish(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_points: Optional[List[_models.MonitoringDataPoint]] = None,
+ *,
+ ikey: str,
+ configuration_etag: Optional[str] = None,
+ transmission_time: Optional[int] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Publish live metrics to the Live Metrics service when there is an active subscription to the
+ metrics.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_points: Data contract between the client and Live Metrics.
+ /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+ invariant version. Default value is None.
+ :type monitoring_data_points: list[~quickpulse_client.models.MonitoringDataPoint]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+        Live Metrics. An 8-byte long integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def publish(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_points: Optional[IO[bytes]] = None,
+ *,
+ ikey: str,
+ configuration_etag: Optional[str] = None,
+ transmission_time: Optional[int] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Publish live metrics to the Live Metrics service when there is an active subscription to the
+ metrics.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_points: Data contract between the client and Live Metrics.
+ /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+ invariant version. Default value is None.
+ :type monitoring_data_points: IO[bytes]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+        Live Metrics. An 8-byte long integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ # @distributed_trace
+ def publish(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_points: Optional[Union[List[_models.MonitoringDataPoint], IO[bytes]]] = None,
+ *,
+ ikey: str,
+ configuration_etag: Optional[str] = None,
+ transmission_time: Optional[int] = None,
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Publish live metrics to the Live Metrics service when there is an active subscription to the
+ metrics.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_points: Data contract between the client and Live Metrics.
+ /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+        invariant version. Is either a [MonitoringDataPoint] type or an IO[bytes] type. Default
+        value is None.
+ :type monitoring_data_points: list[~quickpulse_client.models.MonitoringDataPoint] or IO[bytes]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+        Live Metrics. An 8-byte long integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.CollectionConfigurationInfo] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(monitoring_data_points, (IOBase, bytes)):
+ _content = monitoring_data_points
+ else:
+ if monitoring_data_points is not None:
+ _json = self._serialize.body(monitoring_data_points, "[MonitoringDataPoint]")
+ else:
+ _json = None
+
+ _request = build_quickpulse_publish_request(
+ ikey=ikey,
+ configuration_etag=configuration_etag,
+ transmission_time=transmission_time,
+ content_type=content_type,
+ api_version=self._config.api_version,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("endpoint", endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ response.read() # Load the body in memory and close the socket
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ServiceError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-qps-configuration-etag"] = self._deserialize(
+ "str", response.headers.get("x-ms-qps-configuration-etag")
+ )
+ response_headers["x-ms-qps-subscribed"] = self._deserialize("str", response.headers.get("x-ms-qps-subscribed"))
+
+ deserialized = self._deserialize("CollectionConfigurationInfo", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
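Both operations honor the standard azure-core cls hook: when supplied, the operation returns cls(pipeline_response, deserialized, response_headers) instead of the bare CollectionConfigurationInfo, which is how _Response in _exporter.py captures the subscribed and etag headers. A sketch of a minimal hook; the call at the bottom is hypothetical and assumes an already constructed client:

    class _HeadersOnly:
        # Matches the (pipeline_response, deserialized, response_headers) call shape
        def __init__(self, pipeline_response, deserialized, response_headers):
            self.subscribed = response_headers.get("x-ms-qps-subscribed")
            self.etag = response_headers.get("x-ms-qps-configuration-etag")

    # response = client.is_subscribed(ikey="<ikey>", cls=_HeadersOnly)
    # if response.subscribed == "true":
    #     ...start publishing metrics...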
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_operations/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_serialization.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_serialization.py
new file mode 100644
index 00000000..2f781d74
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_serialization.py
@@ -0,0 +1,1998 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# pylint: skip-file
+# pyright: reportUnnecessaryTypeIgnoreComment=false
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+from typing import (
+ Dict,
+ Any,
+ cast,
+ Optional,
+ Union,
+ AnyStr,
+ IO,
+ Mapping,
+ Callable,
+ TypeVar,
+ MutableMapping,
+ Type,
+ List,
+)
+
+try:
+ from urllib import quote # type: ignore
+except ImportError:
+ from urllib.parse import quote
+import xml.etree.ElementTree as ET
+
+import isodate # type: ignore
+
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+ModelType = TypeVar("ModelType", bound="Model")
+JSON = MutableMapping[str, Any]
+
+
+class RawDeserializer:
+
+ # Accept "text" because we're open minded people...
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
+
+ # Name used in context
+ CONTEXT_NAME = "deserialized_data"
+
+ @classmethod
+ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
+ """Decode data according to content-type.
+
+        Accepts a stream of data as well, but it will be loaded at once into memory for now.
+
+        If no content-type is given, the string version (not bytes, not a stream) is returned.
+
+ :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
+ :type data: str or bytes or IO
+ :param str content_type: The content type.
+ """
+ if hasattr(data, "read"):
+ # Assume a stream
+ data = cast(IO, data).read()
+
+ if isinstance(data, bytes):
+ data_as_str = data.decode(encoding="utf-8-sig")
+ else:
+ # Explain to mypy the correct type.
+ data_as_str = cast(str, data)
+
+ # Remove Byte Order Mark if present in string
+ data_as_str = data_as_str.lstrip(_BOM)
+
+ if content_type is None:
+ return data
+
+ if cls.JSON_REGEXP.match(content_type):
+ try:
+ return json.loads(data_as_str)
+ except ValueError as err:
+ raise DeserializationError("JSON is invalid: {}".format(err), err)
+ elif "xml" in (content_type or []):
+ try:
+
+ try:
+ if isinstance(data, unicode): # type: ignore
+                        # On Python 2.7, ET.fromstring screams on a unicode string, so encode to UTF-8 first
+ data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore
+ except NameError:
+ pass
+
+ return ET.fromstring(data_as_str) # nosec
+ except ET.ParseError as err:
+ # It might be because the server has an issue, and returned JSON with
+ # content-type XML....
+ # So let's try a JSON load, and if it's still broken
+ # let's flow the initial exception
+                def _json_attempt(data):
+ try:
+ return True, json.loads(data)
+ except ValueError:
+ return False, None # Don't care about this one
+
+                success, json_result = _json_attempt(data)
+ if success:
+ return json_result
+                # If I'm here, it's not JSON, it's not XML, let's scream
+                # and raise the last context in this block (the XML exception)
+                # The function hack is because Py2.7 messes up the exception
+                # context otherwise.
+                _LOGGER.critical("Wasn't XML nor JSON, failing")
+ raise DeserializationError("XML is invalid") from err
+ raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
+
+ @classmethod
+ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
+ """Deserialize from HTTP response.
+
+ Use bytes and headers to NOT use any requests/aiohttp or whatever
+ specific implementation.
+        Headers will be tested for "content-type".
+ """
+ # Try to use content-type from headers if available
+ content_type = None
+ if "content-type" in headers:
+ content_type = headers["content-type"].split(";")[0].strip().lower()
+ # Ouch, this server did not declare what it sent...
+ # Let's guess it's JSON...
+ # Also, since Autorest was considering that an empty body was a valid JSON,
+ # need that test as well....
+ else:
+ content_type = "application/json"
+
+ if body_bytes:
+ return cls.deserialize_from_text(body_bytes, content_type)
+ return None
+
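+# A minimal usage sketch (illustrative values, not part of the generated API):
+#
+#     RawDeserializer.deserialize_from_text('{"a": 1}', "application/json")
+#     # -> {'a': 1}
+#     RawDeserializer.deserialize_from_text("<a>1</a>", "application/xml").tag
+#     # -> 'a'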
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+ _long_type = long # type: ignore
+except NameError:
+ _long_type = int
+
+
+class UTC(datetime.tzinfo):
+ """Time Zone info for handling UTC"""
+
+ def utcoffset(self, dt):
+ """UTF offset for UTC is 0."""
+ return datetime.timedelta(0)
+
+ def tzname(self, dt):
+ """Timestamp representation."""
+ return "Z"
+
+    def dst(self, dt):
+        """No daylight saving for UTC."""
+        return datetime.timedelta(0)
+
+
+try:
+ from datetime import timezone as _FixedOffset # type: ignore
+except ImportError: # Python 2.7
+
+ class _FixedOffset(datetime.tzinfo): # type: ignore
+ """Fixed offset in minutes east from UTC.
+ Copy/pasted from Python doc
+ :param datetime.timedelta offset: offset in timedelta format
+ """
+
+ def __init__(self, offset):
+ self.__offset = offset
+
+ def utcoffset(self, dt):
+ return self.__offset
+
+ def tzname(self, dt):
+ return str(self.__offset.total_seconds() / 3600)
+
+ def __repr__(self):
+ return "<FixedOffset {}>".format(self.tzname(None))
+
+ def dst(self, dt):
+ return datetime.timedelta(0)
+
+ def __getinitargs__(self):
+ return (self.__offset,)
+
+
+try:
+ from datetime import timezone
+
+ TZ_UTC = timezone.utc
+except ImportError:
+ TZ_UTC = UTC() # type: ignore
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+def attribute_transformer(key, attr_desc, value):
+ """A key transformer that returns the Python attribute.
+
+ :param str key: The attribute name
+ :param dict attr_desc: The attribute metadata
+ :param object value: The value
+ :returns: A key using attribute name
+ """
+ return (key, value)
+
+
+def full_restapi_key_transformer(key, attr_desc, value):
+ """A key transformer that returns the full RestAPI key path.
+
+    :param str key: The attribute name
+ :param dict attr_desc: The attribute metadata
+ :param object value: The value
+ :returns: A list of keys using RestAPI syntax.
+ """
+ keys = _FLATTEN.split(attr_desc["key"])
+ return ([_decode_attribute_map_key(k) for k in keys], value)
+
+
+def last_restapi_key_transformer(key, attr_desc, value):
+ """A key transformer that returns the last RestAPI key.
+
+ :param str key: The attribute name
+ :param dict attr_desc: The attribute metadata
+ :param object value: The value
+ :returns: The last RestAPI key.
+ """
+ key, value = full_restapi_key_transformer(key, attr_desc, value)
+ return (key[-1], value)
+
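+# Illustrative behavior of the key transformers above, for a hypothetical attribute
+# whose _attribute_map entry flattens into "properties.provisioningState":
+#
+#     desc = {"key": "properties.provisioningState"}
+#     full_restapi_key_transformer("state", desc, "ok")
+#     # -> (["properties", "provisioningState"], "ok")
+#     last_restapi_key_transformer("state", desc, "ok")
+#     # -> ("provisioningState", "ok")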
+
+def _create_xml_node(tag, prefix=None, ns=None):
+ """Create a XML node."""
+ if prefix and ns:
+ ET.register_namespace(prefix, ns)
+ if ns:
+ return ET.Element("{" + ns + "}" + tag)
+ else:
+ return ET.Element(tag)
+
+
+class Model(object):
+ """Mixin for all client request body/response body models to support
+ serialization and deserialization.
+ """
+
+ _subtype_map: Dict[str, Dict[str, Any]] = {}
+ _attribute_map: Dict[str, Dict[str, Any]] = {}
+ _validation: Dict[str, Dict[str, Any]] = {}
+
+ def __init__(self, **kwargs: Any) -> None:
+ self.additional_properties: Optional[Dict[str, Any]] = {}
+ for k in kwargs:
+ if k not in self._attribute_map:
+ _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+ elif k in self._validation and self._validation[k].get("readonly", False):
+ _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+ else:
+ setattr(self, k, kwargs[k])
+
+ def __eq__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes."""
+ if isinstance(other, self.__class__):
+ return self.__dict__ == other.__dict__
+ return False
+
+ def __ne__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes."""
+ return not self.__eq__(other)
+
+ def __str__(self) -> str:
+ return str(self.__dict__)
+
+ @classmethod
+ def enable_additional_properties_sending(cls) -> None:
+ cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
+ @classmethod
+ def is_xml_model(cls) -> bool:
+ try:
+ cls._xml_map # type: ignore
+ except AttributeError:
+ return False
+ return True
+
+ @classmethod
+ def _create_xml_node(cls):
+ """Create XML node."""
+ try:
+ xml_map = cls._xml_map # type: ignore
+ except AttributeError:
+ xml_map = {}
+
+ return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
+ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+ """Return the JSON that would be sent to server from this model.
+
+ This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
+
+ If you want XML serialization, you can pass the kwargs is_xml=True.
+
+ :param bool keep_readonly: If you want to serialize the readonly attributes
+ :returns: A dict JSON compatible object
+ :rtype: dict
+ """
+ serializer = Serializer(self._infer_class_models())
+ return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+
+ def as_dict(
+ self,
+ keep_readonly: bool = True,
+ key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+ **kwargs: Any
+ ) -> JSON:
+ """Return a dict that can be serialized using json.dump.
+
+ Advanced usage might optionally use a callback as parameter:
+
+ .. code::python
+
+ def my_key_transformer(key, attr_desc, value):
+ return key
+
+ Key is the attribute name used in Python. Attr_desc
+ is a dict of metadata. Currently contains 'type' with the
+ msrest type and 'key' with the RestAPI encoded key.
+ Value is the current value in this object.
+
+ The string returned will be used to serialize the key.
+ If the return type is a list, this is considered hierarchical
+ result dict.
+
+ See the three examples in this file:
+
+ - attribute_transformer
+ - full_restapi_key_transformer
+ - last_restapi_key_transformer
+
+ If you want XML serialization, you can pass the kwargs is_xml=True.
+
+ :param function key_transformer: A key transformer function.
+ :returns: A dict JSON compatible object
+ :rtype: dict
+ """
+ serializer = Serializer(self._infer_class_models())
+ return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+
+ @classmethod
+ def _infer_class_models(cls):
+ try:
+ str_models = cls.__module__.rsplit(".", 1)[0]
+ models = sys.modules[str_models]
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ if cls.__name__ not in client_models:
+ raise ValueError("Not Autorest generated code")
+ except Exception:
+ # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
+ client_models = {cls.__name__: cls}
+ return client_models
+
+ @classmethod
+ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType:
+ """Parse a str using the RestAPI syntax and return a model.
+
+ :param str data: A str using RestAPI structure. JSON by default.
+ :param str content_type: JSON by default, set application/xml if XML.
+ :returns: An instance of this model
+ :raises: DeserializationError if something went wrong
+ """
+ deserializer = Deserializer(cls._infer_class_models())
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
+
+ @classmethod
+ def from_dict(
+ cls: Type[ModelType],
+ data: Any,
+ key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+ content_type: Optional[str] = None,
+ ) -> ModelType:
+ """Parse a dict using given key extractor return a model.
+
+        By default, considers the key
+        extractors (attribute_key_case_insensitive_extractor, rest_key_case_insensitive_extractor
+        and last_rest_key_case_insensitive_extractor), in that order.
+
+ :param dict data: A dict using RestAPI structure
+ :param str content_type: JSON by default, set application/xml if XML.
+ :returns: An instance of this model
+ :raises: DeserializationError if something went wrong
+ """
+ deserializer = Deserializer(cls._infer_class_models())
+ deserializer.key_extractors = ( # type: ignore
+ [ # type: ignore
+ attribute_key_case_insensitive_extractor,
+ rest_key_case_insensitive_extractor,
+ last_rest_key_case_insensitive_extractor,
+ ]
+ if key_extractors is None
+ else key_extractors
+ )
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
+
+ @classmethod
+ def _flatten_subtype(cls, key, objects):
+ if "_subtype_map" not in cls.__dict__:
+ return {}
+ result = dict(cls._subtype_map[key])
+ for valuetype in cls._subtype_map[key].values():
+ result.update(objects[valuetype]._flatten_subtype(key, objects))
+ return result
+
+ @classmethod
+ def _classify(cls, response, objects):
+ """Check the class _subtype_map for any child classes.
+ We want to ignore any inherited _subtype_maps.
+ Remove the polymorphic key from the initial data.
+ """
+ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
+ subtype_value = None
+
+ if not isinstance(response, ET.Element):
+ rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
+ subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ else:
+ subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
+ if subtype_value:
+ # Try to match base class. Can be class name only
+ # (bug to fix in Autorest to support x-ms-discriminator-name)
+ if cls.__name__ == subtype_value:
+ return cls
+ flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
+ try:
+ return objects[flatten_mapping_type[subtype_value]] # type: ignore
+ except KeyError:
+ _LOGGER.warning(
+ "Subtype value %s has no mapping, use base class %s.",
+ subtype_value,
+ cls.__name__,
+ )
+ break
+ else:
+ _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
+ break
+ return cls
+
+ @classmethod
+ def _get_rest_key_parts(cls, attr_key):
+ """Get the RestAPI key of this attr, split it and decode part
+ :param str attr_key: Attribute key must be in attribute_map.
+ :returns: A list of RestAPI part
+ :rtype: list
+ """
+ rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
+ return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
+
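+# A sketch of how a generated model plugs into this mixin (the class and its
+# fields are hypothetical, not part of this package):
+#
+#     class Pet(Model):
+#         _attribute_map = {
+#             "name": {"key": "name", "type": "str"},
+#             "age": {"key": "details.age", "type": "int"},
+#         }
+#
+#     Pet(name="Rex", age=3).serialize()
+#     # -> {"name": "Rex", "details": {"age": 3}}
+#     Pet.from_dict({"name": "Rex", "details": {"age": 3}}).age
+#     # -> 3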
+
+def _decode_attribute_map_key(key):
+ """This decode a key in an _attribute_map to the actual key we want to look at
+ inside the received data.
+
+ :param str key: A key string from the generated code
+ """
+ return key.replace("\\.", ".")
+
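+# For example (hypothetical key): _decode_attribute_map_key("odata\\.type") returns
+# "odata.type", while _FLATTEN above splits only on unescaped dots, so the key
+# "properties.odata\\.type" splits into ["properties", "odata\\.type"] before decoding.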
+
+class Serializer(object):
+ """Request object model serializer."""
+
+ basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+ _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
+ days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
+ months = {
+ 1: "Jan",
+ 2: "Feb",
+ 3: "Mar",
+ 4: "Apr",
+ 5: "May",
+ 6: "Jun",
+ 7: "Jul",
+ 8: "Aug",
+ 9: "Sep",
+ 10: "Oct",
+ 11: "Nov",
+ 12: "Dec",
+ }
+ validation = {
+ "min_length": lambda x, y: len(x) < y,
+ "max_length": lambda x, y: len(x) > y,
+ "minimum": lambda x, y: x < y,
+ "maximum": lambda x, y: x > y,
+ "minimum_ex": lambda x, y: x <= y,
+ "maximum_ex": lambda x, y: x >= y,
+ "min_items": lambda x, y: len(x) < y,
+ "max_items": lambda x, y: len(x) > y,
+ "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
+ "unique": lambda x, y: len(x) != len(set(x)),
+ "multiple": lambda x, y: x % y != 0,
+ }
+
+ def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ self.serialize_type = {
+ "iso-8601": Serializer.serialize_iso,
+ "rfc-1123": Serializer.serialize_rfc,
+ "unix-time": Serializer.serialize_unix,
+ "duration": Serializer.serialize_duration,
+ "date": Serializer.serialize_date,
+ "time": Serializer.serialize_time,
+ "decimal": Serializer.serialize_decimal,
+ "long": Serializer.serialize_long,
+ "bytearray": Serializer.serialize_bytearray,
+ "base64": Serializer.serialize_base64,
+ "object": self.serialize_object,
+ "[]": self.serialize_iter,
+ "{}": self.serialize_dict,
+ }
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+ self.key_transformer = full_restapi_key_transformer
+ self.client_side_validation = True
+
+ def _serialize(self, target_obj, data_type=None, **kwargs):
+ """Serialize data into a string according to type.
+
+ :param target_obj: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str, dict
+ :raises: SerializationError if serialization fails.
+ """
+ key_transformer = kwargs.get("key_transformer", self.key_transformer)
+ keep_readonly = kwargs.get("keep_readonly", False)
+ if target_obj is None:
+ return None
+
+ attr_name = None
+ class_name = target_obj.__class__.__name__
+
+ if data_type:
+ return self.serialize_data(target_obj, data_type, **kwargs)
+
+ if not hasattr(target_obj, "_attribute_map"):
+ data_type = type(target_obj).__name__
+ if data_type in self.basic_types.values():
+ return self.serialize_data(target_obj, data_type, **kwargs)
+
+ # Force "is_xml" kwargs if we detect a XML model
+ try:
+ is_xml_model_serialization = kwargs["is_xml"]
+ except KeyError:
+ is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model())
+
+ serialized = {}
+ if is_xml_model_serialization:
+ serialized = target_obj._create_xml_node()
+ try:
+ attributes = target_obj._attribute_map
+ for attr, attr_desc in attributes.items():
+ attr_name = attr
+ if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ continue
+
+ if attr_name == "additional_properties" and attr_desc["key"] == "":
+ if target_obj.additional_properties is not None:
+ serialized.update(target_obj.additional_properties)
+ continue
+ try:
+
+ orig_attr = getattr(target_obj, attr)
+ if is_xml_model_serialization:
+ pass # Don't provide "transformer" for XML for now. Keep "orig_attr"
+ else: # JSON
+ keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr)
+ keys = keys if isinstance(keys, list) else [keys]
+
+ kwargs["serialization_ctxt"] = attr_desc
+ new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs)
+
+ if is_xml_model_serialization:
+ xml_desc = attr_desc.get("xml", {})
+ xml_name = xml_desc.get("name", attr_desc["key"])
+ xml_prefix = xml_desc.get("prefix", None)
+ xml_ns = xml_desc.get("ns", None)
+ if xml_desc.get("attr", False):
+ if xml_ns:
+ ET.register_namespace(xml_prefix, xml_ns)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+ serialized.set(xml_name, new_attr) # type: ignore
+ continue
+ if xml_desc.get("text", False):
+ serialized.text = new_attr # type: ignore
+ continue
+ if isinstance(new_attr, list):
+ serialized.extend(new_attr) # type: ignore
+ elif isinstance(new_attr, ET.Element):
+                        # If the nested XML has no XML/Name, we MUST replace the tag with the local tag, but keep the namespaces.
+ if "name" not in getattr(orig_attr, "_xml_map", {}):
+ splitted_tag = new_attr.tag.split("}")
+ if len(splitted_tag) == 2: # Namespace
+ new_attr.tag = "}".join([splitted_tag[0], xml_name])
+ else:
+ new_attr.tag = xml_name
+ serialized.append(new_attr) # type: ignore
+ else: # That's a basic type
+ # Integrate namespace if necessary
+ local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
+ local_node.text = str(new_attr)
+ serialized.append(local_node) # type: ignore
+ else: # JSON
+ for k in reversed(keys): # type: ignore
+ new_attr = {k: new_attr}
+
+ _new_attr = new_attr
+ _serialized = serialized
+ for k in keys: # type: ignore
+ if k not in _serialized:
+ _serialized.update(_new_attr) # type: ignore
+ _new_attr = _new_attr[k] # type: ignore
+ _serialized = _serialized[k]
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
+
+ except (AttributeError, KeyError, TypeError) as err:
+ msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
+ raise SerializationError(msg) from err
+ else:
+ return serialized
+
+ def body(self, data, data_type, **kwargs):
+ """Serialize data intended for a request body.
+
+ :param data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: dict
+ :raises: SerializationError if serialization fails.
+ :raises: ValueError if data is None
+ """
+
+ # Just in case this is a dict
+ internal_data_type_str = data_type.strip("[]{}")
+ internal_data_type = self.dependencies.get(internal_data_type_str, None)
+ try:
+ is_xml_model_serialization = kwargs["is_xml"]
+ except KeyError:
+ if internal_data_type and issubclass(internal_data_type, Model):
+ is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model())
+ else:
+ is_xml_model_serialization = False
+ if internal_data_type and not isinstance(internal_data_type, Enum):
+ try:
+ deserializer = Deserializer(self.dependencies)
+                # Since we're serializing, it's almost certain the format is not REST JSON.
+                # We're not able to deal with additional properties for now.
+ deserializer.additional_properties_detection = False
+ if is_xml_model_serialization:
+ deserializer.key_extractors = [ # type: ignore
+ attribute_key_case_insensitive_extractor,
+ ]
+ else:
+ deserializer.key_extractors = [
+ rest_key_case_insensitive_extractor,
+ attribute_key_case_insensitive_extractor,
+ last_rest_key_case_insensitive_extractor,
+ ]
+ data = deserializer._deserialize(data_type, data)
+ except DeserializationError as err:
+ raise SerializationError("Unable to build a model: " + str(err)) from err
+
+ return self._serialize(data, data_type, **kwargs)
+
+ def url(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a URL path.
+
+ :param data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str
+ :raises: TypeError if serialization fails.
+ :raises: ValueError if data is None
+ """
+ try:
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+
+ if kwargs.get("skip_quote") is True:
+ output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
+ else:
+ output = quote(str(output), safe="")
+ except SerializationError:
+ raise TypeError("{} must be type {}.".format(name, data_type))
+ else:
+ return output
+
+ def query(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a URL query.
+
+ :param data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :keyword bool skip_quote: Whether to skip quote the serialized result.
+ Defaults to False.
+ :rtype: str, list
+ :raises: TypeError if serialization fails.
+ :raises: ValueError if data is None
+ """
+ try:
+            # Handle lists separately, since we don't want to encode the div separator
+ if data_type.startswith("["):
+ internal_data_type = data_type[1:-1]
+ do_quote = not kwargs.get("skip_quote", False)
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
+
+ # Not a list, regular serialization
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+ if kwargs.get("skip_quote") is True:
+ output = str(output)
+ else:
+ output = quote(str(output), safe="")
+ except SerializationError:
+ raise TypeError("{} must be type {}.".format(name, data_type))
+ else:
+ return str(output)
+
+ def header(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a request header.
+
+ :param data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str
+ :raises: TypeError if serialization fails.
+ :raises: ValueError if data is None
+ """
+ try:
+ if data_type in ["[str]"]:
+ data = ["" if d is None else d for d in data]
+
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+ except SerializationError:
+ raise TypeError("{} must be type {}.".format(name, data_type))
+ else:
+ return str(output)
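+
+    # Illustrative round trips for the three helpers above (values hypothetical):
+    #
+    #     s = Serializer()
+    #     s.url("name", "a b", "str")     # -> "a%20b"
+    #     s.query("top", 10, "int")       # -> "10"
+    #     s.header("flag", True, "bool")  # -> "true"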
+
+ def serialize_data(self, data, data_type, **kwargs):
+ """Serialize generic data according to supplied data type.
+
+ :param data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :param bool required: Whether it's essential that the data not be
+ empty or None
+ :raises: AttributeError if required data is None.
+ :raises: ValueError if data is None
+ :raises: SerializationError if serialization fails.
+ """
+ if data is None:
+ raise ValueError("No value for given attribute")
+
+ try:
+ if data is CoreNull:
+ return None
+ if data_type in self.basic_types.values():
+ return self.serialize_basic(data, data_type, **kwargs)
+
+ elif data_type in self.serialize_type:
+ return self.serialize_type[data_type](data, **kwargs)
+
+ # If dependencies is empty, try with current data class
+ # It has to be a subclass of Enum anyway
+ enum_type = self.dependencies.get(data_type, data.__class__)
+ if issubclass(enum_type, Enum):
+ return Serializer.serialize_enum(data, enum_obj=enum_type)
+
+ iter_type = data_type[0] + data_type[-1]
+ if iter_type in self.serialize_type:
+ return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
+
+ except (ValueError, TypeError) as err:
+ msg = "Unable to serialize value: {!r} as type: {!r}."
+ raise SerializationError(msg.format(data, data_type)) from err
+ else:
+ return self._serialize(data, **kwargs)
+
+ @classmethod
+ def _get_custom_serializers(cls, data_type, **kwargs):
+ custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+ if custom_serializer:
+ return custom_serializer
+ if kwargs.get("is_xml", False):
+ return cls._xml_basic_types_serializers.get(data_type)
+
+ @classmethod
+ def serialize_basic(cls, data, data_type, **kwargs):
+ """Serialize basic builting data type.
+ Serializes objects to str, int, float or bool.
+
+ Possible kwargs:
+ - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+ - is_xml bool : If set, use xml_basic_types_serializers
+
+ :param data: Object to be serialized.
+ :param str data_type: Type of object in the iterable.
+ """
+ custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+ if custom_serializer:
+ return custom_serializer(data)
+ if data_type == "str":
+ return cls.serialize_unicode(data)
+ return eval(data_type)(data) # nosec
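+
+    # For example (illustrative): XML bools go through the lowercase serializer above,
+    # while plain JSON serialization keeps the Python value:
+    #
+    #     Serializer.serialize_basic(True, "bool", is_xml=True)  # -> "true"
+    #     Serializer.serialize_basic(True, "bool")               # -> True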
+
+ @classmethod
+ def serialize_unicode(cls, data):
+ """Special handling for serializing unicode strings in Py2.
+ Encode to UTF-8 if unicode, otherwise handle as a str.
+
+ :param data: Object to be serialized.
+ :rtype: str
+ """
+ try: # If I received an enum, return its value
+ return data.value
+ except AttributeError:
+ pass
+
+ try:
+ if isinstance(data, unicode): # type: ignore
+ # Don't change it, JSON and XML ElementTree are totally able
+ # to serialize correctly u'' strings
+ return data
+ except NameError:
+ return str(data)
+ else:
+ return str(data)
+
+ def serialize_iter(self, data, iter_type, div=None, **kwargs):
+ """Serialize iterable.
+
+ Supported kwargs:
+ - serialization_ctxt dict : The current entry of _attribute_map, or same format.
+ serialization_ctxt['type'] should be same as data_type.
+ - is_xml bool : If set, serialize as XML
+
+ :param list attr: Object to be serialized.
+ :param str iter_type: Type of object in the iterable.
+ :param bool required: Whether the objects in the iterable must
+ not be None or empty.
+        :param str div: If set, this str will be used to join the elements
+            in the iterable into a single string. Default is 'None'.
+ :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
+ Defaults to False.
+ :rtype: list, str
+ """
+ if isinstance(data, str):
+ raise SerializationError("Refuse str type as a valid iter type.")
+
+ serialization_ctxt = kwargs.get("serialization_ctxt", {})
+ is_xml = kwargs.get("is_xml", False)
+
+ serialized = []
+ for d in data:
+ try:
+ serialized.append(self.serialize_data(d, iter_type, **kwargs))
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
+ serialized.append(None)
+
+ if kwargs.get("do_quote", False):
+ serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
+
+ if div:
+ serialized = ["" if s is None else str(s) for s in serialized]
+ serialized = div.join(serialized)
+
+ if "xml" in serialization_ctxt or is_xml:
+ # XML serialization is more complicated
+ xml_desc = serialization_ctxt.get("xml", {})
+ xml_name = xml_desc.get("name")
+ if not xml_name:
+ xml_name = serialization_ctxt["key"]
+
+ # Create a wrap node if necessary (use the fact that Element and list have "append")
+ is_wrapped = xml_desc.get("wrapped", False)
+ node_name = xml_desc.get("itemsName", xml_name)
+ if is_wrapped:
+ final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+ else:
+ final_result = []
+ # All list elements to "local_node"
+ for el in serialized:
+ if isinstance(el, ET.Element):
+ el_node = el
+ else:
+ el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+ if el is not None: # Otherwise it writes "None" :-p
+ el_node.text = str(el)
+ final_result.append(el_node)
+ return final_result
+ return serialized
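+
+    # For instance (illustrative): a comma-joined query list versus a quoted list:
+    #
+    #     Serializer().serialize_iter(["a", "b"], "str", div=",")     # -> "a,b"
+    #     Serializer().serialize_iter(["a b"], "str", do_quote=True)  # -> ["a%20b"]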
+
+ def serialize_dict(self, attr, dict_type, **kwargs):
+ """Serialize a dictionary of objects.
+
+ :param dict attr: Object to be serialized.
+ :param str dict_type: Type of object in the dictionary.
+ :param bool required: Whether the objects in the dictionary must
+ not be None or empty.
+ :rtype: dict
+ """
+ serialization_ctxt = kwargs.get("serialization_ctxt", {})
+ serialized = {}
+ for key, value in attr.items():
+ try:
+ serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
+ serialized[self.serialize_unicode(key)] = None
+
+ if "xml" in serialization_ctxt:
+ # XML serialization is more complicated
+ xml_desc = serialization_ctxt["xml"]
+ xml_name = xml_desc["name"]
+
+ final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+ for key, value in serialized.items():
+ ET.SubElement(final_result, key).text = value
+ return final_result
+
+ return serialized
+
+ def serialize_object(self, attr, **kwargs):
+ """Serialize a generic object.
+ This will be handled as a dictionary. If object passed in is not
+ a basic type (str, int, float, dict, list) it will simply be
+ cast to str.
+
+ :param dict attr: Object to be serialized.
+ :rtype: dict or str
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element):
+ return attr
+ obj_type = type(attr)
+ if obj_type in self.basic_types:
+ return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
+ if obj_type is _long_type:
+ return self.serialize_long(attr)
+ if obj_type is str:
+ return self.serialize_unicode(attr)
+ if obj_type is datetime.datetime:
+ return self.serialize_iso(attr)
+ if obj_type is datetime.date:
+ return self.serialize_date(attr)
+ if obj_type is datetime.time:
+ return self.serialize_time(attr)
+ if obj_type is datetime.timedelta:
+ return self.serialize_duration(attr)
+ if obj_type is decimal.Decimal:
+ return self.serialize_decimal(attr)
+
+ # If it's a model or I know this dependency, serialize as a Model
+ elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ return self._serialize(attr)
+
+ if obj_type == dict:
+ serialized = {}
+ for key, value in attr.items():
+ try:
+ serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs)
+ except ValueError:
+ serialized[self.serialize_unicode(key)] = None
+ return serialized
+
+ if obj_type == list:
+ serialized = []
+ for obj in attr:
+ try:
+ serialized.append(self.serialize_object(obj, **kwargs))
+ except ValueError:
+ pass
+ return serialized
+ return str(attr)
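+
+    # A sketch of this untyped fallback (values illustrative):
+    #
+    #     Serializer().serialize_object({"when": datetime.date(2020, 1, 2)})
+    #     # -> {"when": "2020-01-02"}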
+
+ @staticmethod
+ def serialize_enum(attr, enum_obj=None):
+ try:
+ result = attr.value
+ except AttributeError:
+ result = attr
+ try:
+ enum_obj(result) # type: ignore
+ return result
+ except ValueError:
+ for enum_value in enum_obj: # type: ignore
+ if enum_value.value.lower() == str(attr).lower():
+ return enum_value.value
+ error = "{!r} is not valid value for enum {!r}"
+ raise SerializationError(error.format(attr, enum_obj))
+
+ @staticmethod
+ def serialize_bytearray(attr, **kwargs):
+ """Serialize bytearray into base-64 string.
+
+ :param attr: Object to be serialized.
+ :rtype: str
+ """
+ return b64encode(attr).decode()
+
+ @staticmethod
+ def serialize_base64(attr, **kwargs):
+ """Serialize str into base-64 string.
+
+ :param attr: Object to be serialized.
+ :rtype: str
+ """
+ encoded = b64encode(attr).decode("ascii")
+ return encoded.strip("=").replace("+", "-").replace("/", "_")
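+
+    # For example (illustrative): standard base64 versus the URL-safe, unpadded
+    # variant produced for "base64" typed fields:
+    #
+    #     Serializer.serialize_bytearray(bytearray(b"hello"))  # -> "aGVsbG8="
+    #     Serializer.serialize_base64(b"hello")                # -> "aGVsbG8"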
+
+ @staticmethod
+ def serialize_decimal(attr, **kwargs):
+ """Serialize Decimal object to float.
+
+ :param attr: Object to be serialized.
+ :rtype: float
+ """
+ return float(attr)
+
+ @staticmethod
+ def serialize_long(attr, **kwargs):
+ """Serialize long (Py2) or int (Py3).
+
+ :param attr: Object to be serialized.
+ :rtype: int/long
+ """
+ return _long_type(attr)
+
+ @staticmethod
+ def serialize_date(attr, **kwargs):
+ """Serialize Date object into ISO-8601 formatted string.
+
+ :param Date attr: Object to be serialized.
+ :rtype: str
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_date(attr)
+ t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
+ return t
+
+ @staticmethod
+ def serialize_time(attr, **kwargs):
+ """Serialize Time object into ISO-8601 formatted string.
+
+ :param datetime.time attr: Object to be serialized.
+ :rtype: str
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_time(attr)
+ t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+ if attr.microsecond:
+ t += ".{:02}".format(attr.microsecond)
+ return t
+
+ @staticmethod
+ def serialize_duration(attr, **kwargs):
+ """Serialize TimeDelta object into ISO-8601 formatted string.
+
+ :param TimeDelta attr: Object to be serialized.
+ :rtype: str
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_duration(attr)
+ return isodate.duration_isoformat(attr)
+
+ @staticmethod
+ def serialize_rfc(attr, **kwargs):
+ """Serialize Datetime object into RFC-1123 formatted string.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: str
+ :raises: TypeError if format invalid.
+ """
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ utc = attr.utctimetuple()
+ except AttributeError:
+ raise TypeError("RFC1123 object must be valid Datetime object.")
+
+ return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+ Serializer.days[utc.tm_wday],
+ utc.tm_mday,
+ Serializer.months[utc.tm_mon],
+ utc.tm_year,
+ utc.tm_hour,
+ utc.tm_min,
+ utc.tm_sec,
+ )
+
+ @staticmethod
+ def serialize_iso(attr, **kwargs):
+ """Serialize Datetime object into ISO-8601 formatted string.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: str
+ :raises: SerializationError if format invalid.
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_datetime(attr)
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ utc = attr.utctimetuple()
+ if utc.tm_year > 9999 or utc.tm_year < 1:
+ raise OverflowError("Hit max or min date")
+
+ microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+ if microseconds:
+ microseconds = "." + microseconds
+ date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+ utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+ )
+ return date + microseconds + "Z"
+ except (ValueError, OverflowError) as err:
+ msg = "Unable to serialize datetime object."
+ raise SerializationError(msg) from err
+ except AttributeError as err:
+ msg = "ISO-8601 object must be valid Datetime object."
+ raise TypeError(msg) from err
+
+ @staticmethod
+ def serialize_unix(attr, **kwargs):
+ """Serialize Datetime object into IntTime format.
+ This is represented as seconds.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: int
+ :raises: SerializationError if format invalid
+ """
+ if isinstance(attr, int):
+ return attr
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ return int(calendar.timegm(attr.utctimetuple()))
+ except AttributeError:
+ raise TypeError("Unix time object must be valid Datetime object.")
+
+
+def rest_key_extractor(attr, attr_desc, data):
+ key = attr_desc["key"]
+ working_data = data
+
+ while "." in key:
+        # Need the cast, as for some reason "split" is typed as list[str | Any]
+ dict_keys = cast(List[str], _FLATTEN.split(key))
+ if len(dict_keys) == 1:
+ key = _decode_attribute_map_key(dict_keys[0])
+ break
+ working_key = _decode_attribute_map_key(dict_keys[0])
+ working_data = working_data.get(working_key, data)
+ if working_data is None:
+            # If at any point while following the flattened JSON path we see None, it means
+            # that all properties underneath are None as well
+ return None
+ key = ".".join(dict_keys[1:])
+
+ return working_data.get(key)
+
+
+def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+ key = attr_desc["key"]
+ working_data = data
+
+ while "." in key:
+ dict_keys = _FLATTEN.split(key)
+ if len(dict_keys) == 1:
+ key = _decode_attribute_map_key(dict_keys[0])
+ break
+ working_key = _decode_attribute_map_key(dict_keys[0])
+ working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+ if working_data is None:
+            # If at any point while following the flattened JSON path we see None, it means
+            # that all properties underneath are None as well
+ return None
+ key = ".".join(dict_keys[1:])
+
+ if working_data:
+ return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data):
+ """Extract the attribute in "data" based on the last part of the JSON path key."""
+ key = attr_desc["key"]
+ dict_keys = _FLATTEN.split(key)
+ return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ This is the case insensitive version of "last_rest_key_extractor"
+ """
+ key = attr_desc["key"]
+ dict_keys = _FLATTEN.split(key)
+ return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+ return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+ found_key = None
+ lower_attr = attr.lower()
+ for key in data:
+ if lower_attr == key.lower():
+ found_key = key
+ break
+
+ return data.get(found_key)
+
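+# Illustrative behavior of the extractor family (inputs hypothetical):
+#
+#     desc = {"key": "properties.name", "type": "str"}
+#     rest_key_extractor("name", desc, {"properties": {"name": "x"}})        # -> "x"
+#     last_rest_key_case_insensitive_extractor("name", desc, {"NAME": "x"})  # -> "x"
+#     attribute_key_case_insensitive_extractor("name", None, {"Name": "x"})  # -> "x"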
+
+def _extract_name_from_internal_type(internal_type):
+ """Given an internal type XML description, extract correct XML name with namespace.
+
+ :param dict internal_type: An model type
+ :rtype: tuple
+ :returns: A tuple XML name + namespace dict
+ """
+ internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+ xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+ xml_ns = internal_type_xml_map.get("ns", None)
+ if xml_ns:
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+ return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):
+ if isinstance(data, dict):
+ return None
+
+ # Test if this model is XML ready first
+ if not isinstance(data, ET.Element):
+ return None
+
+ xml_desc = attr_desc.get("xml", {})
+ xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for children
+ is_iter_type = attr_desc["type"].startswith("[")
+ is_wrapped = xml_desc.get("wrapped", False)
+ internal_type = attr_desc.get("internalType", None)
+ internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+ # Integrate namespace if necessary
+ xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+ if xml_ns:
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+ # If it's an attribute, that's simple
+ if xml_desc.get("attr", False):
+ return data.get(xml_name)
+
+ # If it's x-ms-text, that's simple too
+ if xml_desc.get("text", False):
+ return data.text
+
+ # Scenario where I take the local name:
+ # - Wrapped node
+ # - Internal type is an enum (considered basic types)
+ # - Internal type has no XML/Name node
+ if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+ children = data.findall(xml_name)
+ # If internal type has a local name and it's not a list, I use that name
+ elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+ xml_name = _extract_name_from_internal_type(internal_type)
+ children = data.findall(xml_name)
+ # That's an array
+ else:
+ if internal_type: # Complex type, ignore itemsName and use the complex type name
+ items_name = _extract_name_from_internal_type(internal_type)
+ else:
+ items_name = xml_desc.get("itemsName", xml_name)
+ children = data.findall(items_name)
+
+ if len(children) == 0:
+ if is_iter_type:
+ if is_wrapped:
+ return None # is_wrapped no node, we want None
+ else:
+ return [] # not wrapped, assume empty list
+ return None # Assume it's not there, maybe an optional node.
+
+ # If is_iter_type and not wrapped, return all found children
+ if is_iter_type:
+ if not is_wrapped:
+ return children
+ else: # Iter and wrapped, should have found one node only (the wrap one)
+            if len(children) != 1:
+                raise DeserializationError(
+                    "Tried to deserialize a wrapped array, but found several '{}' nodes. Maybe this array should not be declared as wrapped?".format(
+                        xml_name
+                    )
+                )
+ return list(children[0]) # Might be empty list and that's ok.
+
+    # Here it's not an iter type; we should have found one element only, or none
+ if len(children) > 1:
+ raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name))
+ return children[0]
+
+
+class Deserializer(object):
+ """Response object model deserializer.
+
+ :param dict classes: Class type dictionary for deserializing complex types.
+ :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
+ """
+
+ basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+
+ def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ self.deserialize_type = {
+ "iso-8601": Deserializer.deserialize_iso,
+ "rfc-1123": Deserializer.deserialize_rfc,
+ "unix-time": Deserializer.deserialize_unix,
+ "duration": Deserializer.deserialize_duration,
+ "date": Deserializer.deserialize_date,
+ "time": Deserializer.deserialize_time,
+ "decimal": Deserializer.deserialize_decimal,
+ "long": Deserializer.deserialize_long,
+ "bytearray": Deserializer.deserialize_bytearray,
+ "base64": Deserializer.deserialize_base64,
+ "object": self.deserialize_object,
+ "[]": self.deserialize_iter,
+ "{}": self.deserialize_dict,
+ }
+ self.deserialize_expected_types = {
+ "duration": (isodate.Duration, datetime.timedelta),
+ "iso-8601": (datetime.datetime),
+ }
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+ self.key_extractors = [rest_key_extractor, xml_key_extractor]
+        # Additional properties only work if the "rest_key_extractor" is used to
+        # extract the keys. Making it work with any key extractor is too
+        # complicated, with no real scenario for now.
+        # So we add a flag to disable additional properties detection. This flag should be
+        # used if you expect the deserialization to NOT come from a JSON REST syntax.
+        # Otherwise, results are unexpected.
+ self.additional_properties_detection = True
+
+ def __call__(self, target_obj, response_data, content_type=None):
+ """Call the deserializer to process a REST response.
+
+ :param str target_obj: Target data type to deserialize to.
+ :param requests.Response response_data: REST response object.
+ :param str content_type: Swagger "produces" if available.
+ :raises: DeserializationError if deserialization fails.
+ :return: Deserialized object.
+ """
+ data = self._unpack_content(response_data, content_type)
+ return self._deserialize(target_obj, data)
+
+ def _deserialize(self, target_obj, data):
+ """Call the deserializer on a model.
+
+ Data needs to be already deserialized as JSON or XML ElementTree
+
+ :param str target_obj: Target data type to deserialize to.
+ :param object data: Object to deserialize.
+ :raises: DeserializationError if deserialization fails.
+ :return: Deserialized object.
+ """
+ # This is already a model, go recursive just in case
+ if hasattr(data, "_attribute_map"):
+ constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
+ try:
+ for attr, mapconfig in data._attribute_map.items():
+ if attr in constants:
+ continue
+ value = getattr(data, attr)
+ if value is None:
+ continue
+ local_type = mapconfig["type"]
+ internal_data_type = local_type.strip("[]{}")
+ if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
+ continue
+ setattr(data, attr, self._deserialize(local_type, value))
+ return data
+ except AttributeError:
+ return
+
+ response, class_name = self._classify_target(target_obj, data)
+
+ if isinstance(response, str):
+ return self.deserialize_data(data, response)
+ elif isinstance(response, type) and issubclass(response, Enum):
+ return self.deserialize_enum(data, response)
+
+ if data is None:
+ return data
+ try:
+ attributes = response._attribute_map # type: ignore
+ d_attrs = {}
+ for attr, attr_desc in attributes.items():
+ # Check empty string. If it's not empty, someone has a real "additionalProperties"...
+ if attr == "additional_properties" and attr_desc["key"] == "":
+ continue
+ raw_value = None
+ # Enhance attr_desc with some dynamic data
+ attr_desc = attr_desc.copy() # Do a copy, do not change the real one
+ internal_data_type = attr_desc["type"].strip("[]{}")
+ if internal_data_type in self.dependencies:
+ attr_desc["internalType"] = self.dependencies[internal_data_type]
+
+ for key_extractor in self.key_extractors:
+ found_value = key_extractor(attr, attr_desc, data)
+ if found_value is not None:
+ if raw_value is not None and raw_value != found_value:
+ msg = (
+ "Ignoring extracted value '%s' from %s for key '%s'"
+ " (duplicate extraction, follow extractors order)"
+ )
+ _LOGGER.warning(msg, found_value, key_extractor, attr)
+ continue
+ raw_value = found_value
+
+ value = self.deserialize_data(raw_value, attr_desc["type"])
+ d_attrs[attr] = value
+ except (AttributeError, TypeError, KeyError) as err:
+ msg = "Unable to deserialize to object: " + class_name # type: ignore
+ raise DeserializationError(msg) from err
+ else:
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
+
+ def _build_additional_properties(self, attribute_map, data):
+ if not self.additional_properties_detection:
+ return None
+ if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "":
+ # Check empty string. If it's not empty, someone has a real "additionalProperties"
+ return None
+ if isinstance(data, ET.Element):
+ data = {el.tag: el.text for el in data}
+
+ known_keys = {
+ _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
+ for desc in attribute_map.values()
+ if desc["key"] != ""
+ }
+ present_keys = set(data.keys())
+ missing_keys = present_keys - known_keys
+ return {key: data[key] for key in missing_keys}
+
+ def _classify_target(self, target, data):
+ """Check to see whether the deserialization target object can
+ be classified into a subclass.
+ Once classification has been determined, initialize object.
+
+ :param str target: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ """
+ if target is None:
+ return None, None
+
+ if isinstance(target, str):
+ try:
+ target = self.dependencies[target]
+ except KeyError:
+ return target, target
+
+ try:
+ target = target._classify(data, self.dependencies) # type: ignore
+ except AttributeError:
+ pass # Target is not a Model, no classify
+ return target, target.__class__.__name__ # type: ignore
+
+ def failsafe_deserialize(self, target_obj, data, content_type=None):
+ """Ignores any errors encountered in deserialization,
+ and falls back to not deserializing the object. Recommended
+ for use in error deserialization, as we want to return the
+ HttpResponseError to users, and not have them deal with
+ a deserialization error.
+
+ :param str target_obj: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ :param str content_type: Swagger "produces" if available.
+ """
+ try:
+ return self(target_obj, data, content_type=content_type)
+        except Exception:  # pylint: disable=broad-except
+ _LOGGER.debug(
+ "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+ )
+ return None
+
+ @staticmethod
+ def _unpack_content(raw_data, content_type=None):
+ """Extract the correct structure for deserialization.
+
+ If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
+        If we can't, raise. Your Pipeline should have a RawDeserializer.
+
+ If not a pipeline response and raw_data is bytes or string, use content-type
+ to decode it. If no content-type, try JSON.
+
+ If raw_data is something else, bypass all logic and return it directly.
+
+ :param raw_data: Data to be processed.
+ :param content_type: How to parse if raw_data is a string/bytes.
+ :raises JSONDecodeError: If JSON is requested and parsing is impossible.
+        :raises UnicodeDecodeError: If bytes are not valid UTF-8
+ """
+ # Assume this is enough to detect a Pipeline Response without importing it
+ context = getattr(raw_data, "context", {})
+ if context:
+ if RawDeserializer.CONTEXT_NAME in context:
+ return context[RawDeserializer.CONTEXT_NAME]
+ raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
+
+ # Assume this is enough to recognize universal_http.ClientResponse without importing it
+ if hasattr(raw_data, "body"):
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
+
+ # Assume this enough to recognize requests.Response without importing it.
+ if hasattr(raw_data, "_content_consumed"):
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
+
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
+ return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
+ return raw_data
+
+ def _instantiate_model(self, response, attrs, additional_properties=None):
+ """Instantiate a response model passing in deserialized args.
+
+ :param response: The response model class.
+        :param attrs: The deserialized response attributes.
+        :param additional_properties: Extra keys found in the payload, if detection is enabled.
+ """
+ if callable(response):
+ subtype = getattr(response, "_subtype_map", {})
+ try:
+ readonly = [k for k, v in response._validation.items() if v.get("readonly")]
+ const = [k for k, v in response._validation.items() if v.get("constant")]
+ kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
+ response_obj = response(**kwargs)
+ for attr in readonly:
+ setattr(response_obj, attr, attrs.get(attr))
+ if additional_properties:
+ response_obj.additional_properties = additional_properties
+ return response_obj
+ except TypeError as err:
+ msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
+                raise DeserializationError(msg + str(err)) from err
+ else:
+ try:
+ for attr, value in attrs.items():
+ setattr(response, attr, value)
+ return response
+ except Exception as exp:
+ msg = "Unable to populate response model. "
+ msg += "Type: {}, Error: {}".format(type(response), exp)
+            raise DeserializationError(msg) from exp
+
+ def deserialize_data(self, data, data_type):
+ """Process data for deserialization according to data type.
+
+ :param str data: The response string to be deserialized.
+ :param str data_type: The type to deserialize to.
+ :raises: DeserializationError if deserialization fails.
+ :return: Deserialized object.
+ """
+ if data is None:
+ return data
+
+ try:
+ if not data_type:
+ return data
+ if data_type in self.basic_types.values():
+ return self.deserialize_basic(data, data_type)
+ if data_type in self.deserialize_type:
+ if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
+ return data
+
+ is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
+ return None
+ data_val = self.deserialize_type[data_type](data)
+ return data_val
+
+ iter_type = data_type[0] + data_type[-1]
+ if iter_type in self.deserialize_type:
+ return self.deserialize_type[iter_type](data, data_type[1:-1])
+
+ obj_type = self.dependencies[data_type]
+ if issubclass(obj_type, Enum):
+ if isinstance(data, ET.Element):
+ data = data.text
+ return self.deserialize_enum(data, obj_type)
+
+ except (ValueError, TypeError, AttributeError) as err:
+ msg = "Unable to deserialize response data."
+ msg += " Data: {}, {}".format(data, data_type)
+ raise DeserializationError(msg) from err
+ else:
+ return self._deserialize(obj_type, data)
+
+ def deserialize_iter(self, attr, iter_type):
+ """Deserialize an iterable.
+
+ :param list attr: Iterable to be deserialized.
+ :param str iter_type: The type of object in the iterable.
+ :rtype: list
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element): # If I receive an element here, get the children
+ attr = list(attr)
+ if not isinstance(attr, (list, set)):
+ raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
+ return [self.deserialize_data(a, iter_type) for a in attr]
+
+ def deserialize_dict(self, attr, dict_type):
+ """Deserialize a dictionary.
+
+ :param dict/list attr: Dictionary to be deserialized. Also accepts
+ a list of key, value pairs.
+ :param str dict_type: The object type of the items in the dictionary.
+ :rtype: dict
+ """
+ if isinstance(attr, list):
+ return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+ if isinstance(attr, ET.Element):
+ # Transform <Key>value</Key> into {"Key": "value"}
+ attr = {el.tag: el.text for el in attr}
+ return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
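+
+    # For example (illustrative), both accepted shapes deserialize identically:
+    #
+    #     d = Deserializer()
+    #     d.deserialize_dict({"a": "1"}, "str")                    # -> {"a": "1"}
+    #     d.deserialize_dict([{"key": "a", "value": "1"}], "str")  # -> {"a": "1"}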
+
+ def deserialize_object(self, attr, **kwargs):
+ """Deserialize a generic object.
+ This will be handled as a dictionary.
+
+ :param dict attr: Dictionary to be deserialized.
+ :rtype: dict
+ :raises: TypeError if non-builtin datatype encountered.
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element):
+            # Do not recurse on XML, just return the tree as-is
+ return attr
+ if isinstance(attr, str):
+ return self.deserialize_basic(attr, "str")
+ obj_type = type(attr)
+ if obj_type in self.basic_types:
+ return self.deserialize_basic(attr, self.basic_types[obj_type])
+ if obj_type is _long_type:
+ return self.deserialize_long(attr)
+
+ if obj_type == dict:
+ deserialized = {}
+ for key, value in attr.items():
+ try:
+ deserialized[key] = self.deserialize_object(value, **kwargs)
+ except ValueError:
+ deserialized[key] = None
+ return deserialized
+
+ if obj_type == list:
+ deserialized = []
+ for obj in attr:
+ try:
+ deserialized.append(self.deserialize_object(obj, **kwargs))
+ except ValueError:
+ pass
+ return deserialized
+
+ else:
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
+
+ def deserialize_basic(self, attr, data_type):
+ """Deserialize basic builtin data type from string.
+ Will attempt to convert to str, int, float and bool.
+ This function will also accept '1', '0', 'true' and 'false' as
+ valid bool values.
+
+ :param str attr: response string to be deserialized.
+ :param str data_type: deserialization data type.
+ :rtype: str, int, float or bool
+ :raises: TypeError if string format is not valid.
+ """
+ # If we're here, data is supposed to be a basic type.
+ # If it's still an XML node, take the text
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if not attr:
+ if data_type == "str":
+ # None or '', node <a/> is empty string.
+ return ""
+ else:
+ # None or '', node <a/> with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
+
+ if data_type == "bool":
+ if attr in [True, False, 1, 0]:
+ return bool(attr)
+ elif isinstance(attr, str):
+ if attr.lower() in ["true", "1"]:
+ return True
+ elif attr.lower() in ["false", "0"]:
+ return False
+ raise TypeError("Invalid boolean value: {}".format(attr))
+
+ if data_type == "str":
+ return self.deserialize_unicode(attr)
+ return eval(data_type)(attr) # nosec
+
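+    # Usage sketch (illustrative, not part of the generated module). Within
+    # this module data_type is always drawn from the basic_types table
+    # ("str", "int", "float", "bool"), which is what keeps the eval() call
+    # above bounded.
+    #
+    #   >>> d = Deserializer()
+    #   >>> d.deserialize_basic("true", "bool")
+    #   True
+    #   >>> d.deserialize_basic("0", "bool")
+    #   False
+    #   >>> d.deserialize_basic("", "str")
+    #   ''
+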
+ @staticmethod
+ def deserialize_unicode(data):
+ """Preserve unicode objects in Python 2, otherwise return data
+ as a string.
+
+ :param str data: response string to be deserialized.
+ :rtype: str or unicode
+ """
+ # We might be here because we have an enum modeled as string,
+ # and we try to deserialize a partial dict with enum inside
+ if isinstance(data, Enum):
+ return data
+
+ # Consider this is real string
+ try:
+ if isinstance(data, unicode): # type: ignore
+ return data
+ except NameError:
+ return str(data)
+ else:
+ return str(data)
+
+ @staticmethod
+ def deserialize_enum(data, enum_obj):
+ """Deserialize string into enum object.
+
+ If the string is not a valid enum value it will be returned as-is
+ and a warning will be logged.
+
+ :param str data: Response string to be deserialized. If this value is
+ None or invalid it will be returned as-is.
+ :param Enum enum_obj: Enum object to deserialize to.
+ :rtype: Enum
+ """
+ if isinstance(data, enum_obj) or data is None:
+ return data
+ if isinstance(data, Enum):
+ data = data.value
+ if isinstance(data, int):
+            # Workaround. We may remove it in the future.
+ try:
+ return list(enum_obj.__members__.values())[data]
+ except IndexError:
+ error = "{!r} is not a valid index for enum {!r}"
+ raise DeserializationError(error.format(data, enum_obj))
+ try:
+ return enum_obj(str(data))
+ except ValueError:
+ for enum_value in enum_obj:
+ if enum_value.value.lower() == str(data).lower():
+ return enum_value
+            # We no longer fail on unknown values; deserialize them as strings
+ _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
+ return Deserializer.deserialize_unicode(data)
+
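+    # Usage sketch (illustrative, not part of the generated module), using a
+    # hypothetical enum: a case-mismatched value still resolves to a member,
+    # and an unknown value is logged and returned as a plain string.
+    #
+    #   >>> from enum import Enum
+    #   >>> class Color(Enum):
+    #   ...     RED = "Red"
+    #   >>> Deserializer.deserialize_enum("red", Color)
+    #   <Color.RED: 'Red'>
+    #   >>> Deserializer.deserialize_enum("magenta", Color)
+    #   'magenta'
+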
+ @staticmethod
+ def deserialize_bytearray(attr):
+ """Deserialize string into bytearray.
+
+ :param str attr: response string to be deserialized.
+ :rtype: bytearray
+ :raises: TypeError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ return bytearray(b64decode(attr)) # type: ignore
+
+ @staticmethod
+ def deserialize_base64(attr):
+ """Deserialize base64 encoded string into string.
+
+ :param str attr: response string to be deserialized.
+ :rtype: bytearray
+ :raises: TypeError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
+ attr = attr + padding # type: ignore
+ encoded = attr.replace("-", "+").replace("_", "/")
+ return b64decode(encoded)
+
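+    # Usage sketch (illustrative, not part of the generated module): the
+    # URL-safe alphabet is normalized and stripped padding is restored
+    # before decoding.
+    #
+    #   >>> Deserializer.deserialize_base64("aGVsbG8")
+    #   b'hello'
+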
+ @staticmethod
+ def deserialize_decimal(attr):
+ """Deserialize string into Decimal object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: Decimal
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ return decimal.Decimal(str(attr)) # type: ignore
+ except decimal.DecimalException as err:
+ msg = "Invalid decimal {}".format(attr)
+ raise DeserializationError(msg) from err
+
+ @staticmethod
+ def deserialize_long(attr):
+ """Deserialize string into long (Py2) or int (Py3).
+
+ :param str attr: response string to be deserialized.
+ :rtype: long or int
+ :raises: ValueError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ return _long_type(attr) # type: ignore
+
+ @staticmethod
+ def deserialize_duration(attr):
+ """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: TimeDelta
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ duration = isodate.parse_duration(attr)
+ except (ValueError, OverflowError, AttributeError) as err:
+ msg = "Cannot deserialize duration object."
+ raise DeserializationError(msg) from err
+ else:
+ return duration
+
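+    # Usage sketch (illustrative, not part of the generated module): ISO-8601
+    # durations become datetime.timedelta (isodate.Duration when year/month
+    # components are present).
+    #
+    #   >>> Deserializer.deserialize_duration("PT1H30M")
+    #   datetime.timedelta(seconds=5400)
+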
+ @staticmethod
+ def deserialize_date(attr):
+ """Deserialize ISO-8601 formatted string into Date object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: Date
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use defaultmonth/defaultday. Passing 0 ensures partial dates raise instead of being silently completed.
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+ @staticmethod
+ def deserialize_time(attr):
+ """Deserialize ISO-8601 formatted string into time object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: datetime.time
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+ return isodate.parse_time(attr)
+
+ @staticmethod
+ def deserialize_rfc(attr):
+ """Deserialize RFC-1123 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: Datetime
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ parsed_date = email.utils.parsedate_tz(attr) # type: ignore
+ date_obj = datetime.datetime(
+ *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+ )
+ if not date_obj.tzinfo:
+ date_obj = date_obj.astimezone(tz=TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to rfc datetime object."
+ raise DeserializationError(msg) from err
+ else:
+ return date_obj
+
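+    # Usage sketch (illustrative, not part of the generated module):
+    # parsedate_tz reports the offset in seconds, hence the division by 60
+    # above to build a minute-based fixed offset.
+    #
+    #   >>> dt = Deserializer.deserialize_rfc("Mon, 01 Apr 2024 12:00:00 GMT")
+    #   >>> (dt.year, dt.month, dt.day, dt.utcoffset().total_seconds())
+    #   (2024, 4, 1, 0.0)
+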
+ @staticmethod
+ def deserialize_iso(attr):
+ """Deserialize ISO-8601 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: Datetime
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ attr = attr.upper() # type: ignore
+ match = Deserializer.valid_date.match(attr)
+ if not match:
+ raise ValueError("Invalid datetime string: " + attr)
+
+ check_decimal = attr.split(".")
+ if len(check_decimal) > 1:
+ decimal_str = ""
+ for digit in check_decimal[1]:
+ if digit.isdigit():
+ decimal_str += digit
+ else:
+ break
+ if len(decimal_str) > 6:
+ attr = attr.replace(decimal_str, decimal_str[0:6])
+
+ date_obj = isodate.parse_datetime(attr)
+ test_utc = date_obj.utctimetuple()
+ if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+ raise OverflowError("Hit max or min date")
+ except (ValueError, OverflowError, AttributeError) as err:
+ msg = "Cannot deserialize datetime object."
+ raise DeserializationError(msg) from err
+ else:
+ return date_obj
+
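+    # Usage sketch (illustrative, not part of the generated module): the
+    # fractional-seconds trimming above keeps only microsecond precision, so
+    # 100-nanosecond "ticks" timestamps still parse.
+    #
+    #   >>> dt = Deserializer.deserialize_iso("2024-04-01T12:00:00.1234567Z")
+    #   >>> dt.microsecond
+    #   123456
+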
+ @staticmethod
+ def deserialize_unix(attr):
+ """Serialize Datetime object into IntTime format.
+ This is represented as seconds.
+
+ :param int attr: Object to be serialized.
+ :rtype: Datetime
+ :raises: DeserializationError if format invalid
+ """
+ if isinstance(attr, ET.Element):
+ attr = int(attr.text) # type: ignore
+ try:
+ attr = int(attr)
+ date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to unix datetime object."
+ raise DeserializationError(msg) from err
+ else:
+ return date_obj
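+
+    # Usage sketch (illustrative, not part of the generated module): the
+    # timestamp is interpreted as whole seconds since the Unix epoch, UTC.
+    #
+    #   >>> Deserializer.deserialize_unix(1700000000).isoformat()
+    #   '2023-11-14T22:13:20+00:00'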
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_vendor.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_vendor.py
new file mode 100644
index 00000000..3e291d2b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/_vendor.py
@@ -0,0 +1,26 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from abc import ABC
+from typing import TYPE_CHECKING
+
+from ._configuration import QuickpulseClientConfiguration
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core import PipelineClient
+
+ from ._serialization import Deserializer, Serializer
+
+
+class QuickpulseClientMixinABC(ABC):
+ """DO NOT use this class. It is for internal typing use only."""
+
+ _client: "PipelineClient"
+ _config: QuickpulseClientConfiguration
+ _serialize: "Serializer"
+ _deserialize: "Deserializer"
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/__init__.py
new file mode 100644
index 00000000..664b539c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/__init__.py
@@ -0,0 +1,23 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._client import QuickpulseClient
+
+try:
+ from ._patch import __all__ as _patch_all
+ from ._patch import * # pylint: disable=unused-wildcard-import
+except ImportError:
+ _patch_all = []
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "QuickpulseClient",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_client.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_client.py
new file mode 100644
index 00000000..2f362639
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_client.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable, TYPE_CHECKING
+
+from azure.core import AsyncPipelineClient
+from azure.core.pipeline import policies
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+
+from .. import models as _models
+from .._serialization import Deserializer, Serializer
+from ._configuration import QuickpulseClientConfiguration
+from ._operations import QuickpulseClientOperationsMixin
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+
+class QuickpulseClient(QuickpulseClientOperationsMixin): # pylint: disable=client-accepts-api-version-keyword
+ """Quickpulse Client.
+
+ :param credential: Credential needed for the client to connect to Azure. Required.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :keyword api_version: API version. Default value is "2024-04-01-preview". Note that overriding
+ this default value may result in unsupported behavior.
+ :paramtype api_version: str
+ """
+
+ def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
+ _endpoint = "{endpoint}"
+ self._config = QuickpulseClientConfiguration(credential=credential, **kwargs)
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs)
+
+ client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+ self._serialize.client_side_validation = False
+
+ def send_request(
+ self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
+ ) -> Awaitable[AsyncHttpResponse]:
+ """Runs the network request through the client's chained policies.
+
+ >>> from azure.core.rest import HttpRequest
+ >>> request = HttpRequest("GET", "https://www.example.org/")
+ <HttpRequest [GET], url: 'https://www.example.org/'>
+ >>> response = await client.send_request(request)
+ <AsyncHttpResponse: 200 OK>
+
+ For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
+
+ :param request: The network request you want to make. Required.
+ :type request: ~azure.core.rest.HttpRequest
+ :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+ :return: The response of your network call. Does not do error handling on your response.
+ :rtype: ~azure.core.rest.AsyncHttpResponse
+ """
+
+ request_copy = deepcopy(request)
+ request_copy.url = self._client.format_url(request_copy.url)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> "QuickpulseClient":
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details: Any) -> None:
+ await self._client.__aexit__(*exc_details)
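+
+
+# Usage sketch (illustrative, not part of the generated module); assumes
+# azure-identity is installed, but any AsyncTokenCredential works.
+#
+#   >>> from azure.identity.aio import DefaultAzureCredential
+#   >>> async def ping():
+#   ...     async with QuickpulseClient(DefaultAzureCredential()) as client:
+#   ...         ...  # await client.is_subscribed(...) / client.publish(...)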
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_configuration.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_configuration.py
new file mode 100644
index 00000000..d6746c09
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_configuration.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.pipeline import policies
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+VERSION = "unknown"
+
+
+class QuickpulseClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+ """Configuration for QuickpulseClient.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure. Required.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :keyword api_version: API version. Default value is "2024-04-01-preview". Note that overriding
+ this default value may result in unsupported behavior.
+ :paramtype api_version: str
+ """
+
+ def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
+ api_version: str = kwargs.pop("api_version", "2024-04-01-preview")
+
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+
+ self.credential = credential
+ self.api_version = api_version
+ self.credential_scopes = kwargs.pop("credential_scopes", ["https://monitor.azure.com/.default"])
+ kwargs.setdefault("sdk_moniker", "quickpulseclient/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
+ self._configure(**kwargs)
+
+ def _configure(self, **kwargs: Any) -> None:
+ self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
+ self.authentication_policy = kwargs.get("authentication_policy")
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(
+ self.credential, *self.credential_scopes, **kwargs
+ )
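+
+
+# Usage sketch (illustrative, not part of the generated module): any policy
+# above can be swapped via keyword arguments; `credential` is assumed to be
+# an AsyncTokenCredential instance.
+#
+#   >>> from azure.core.pipeline import policies
+#   >>> config = QuickpulseClientConfiguration(
+#   ...     credential,
+#   ...     retry_policy=policies.AsyncRetryPolicy(retry_total=3),
+#   ... )
+#   >>> config.polling_interval
+#   30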
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/__init__.py
new file mode 100644
index 00000000..3d1697f9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import QuickpulseClientOperationsMixin
+
+from ._patch import __all__ as _patch_all
+from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "QuickpulseClientOperationsMixin",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_operations.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_operations.py
new file mode 100644
index 00000000..eecf72f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_operations.py
@@ -0,0 +1,464 @@
+# pylint: disable=too-many-lines,too-many-statements
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload
+
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+from ... import models as _models
+from ..._operations._operations import build_quickpulse_is_subscribed_request, build_quickpulse_publish_request
+from .._vendor import QuickpulseClientMixinABC
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class QuickpulseClientOperationsMixin(QuickpulseClientMixinABC):
+ @overload
+ async def is_subscribed(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_point: Optional[_models.MonitoringDataPoint] = None,
+ *,
+ ikey: str,
+ transmission_time: Optional[int] = None,
+ machine_name: Optional[str] = None,
+ instance_name: Optional[str] = None,
+ stream_id: Optional[str] = None,
+ role_name: Optional[str] = None,
+ invariant_version: Optional[str] = None,
+ configuration_etag: Optional[str] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Determine whether there is any subscription to the metrics and documents.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+ Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+ name and invariant version. Default value is None.
+ :type monitoring_data_point: ~quickpulse_client.models.MonitoringDataPoint
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+ machine name with instance name as a backup. Default value is None.
+ :paramtype machine_name: str
+ :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+ Metrics uses machine name with instance name as a backup. Default value is None.
+ :paramtype instance_name: str
+ :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+ and documents. Default value is None.
+ :paramtype stream_id: str
+ :keyword role_name: Cloud role name of the service. Default value is None.
+ :paramtype role_name: str
+ :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+ between the client and Live Metrics. Default value is None.
+ :paramtype invariant_version: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def is_subscribed(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_point: Optional[IO[bytes]] = None,
+ *,
+ ikey: str,
+ transmission_time: Optional[int] = None,
+ machine_name: Optional[str] = None,
+ instance_name: Optional[str] = None,
+ stream_id: Optional[str] = None,
+ role_name: Optional[str] = None,
+ invariant_version: Optional[str] = None,
+ configuration_etag: Optional[str] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Determine whether there is any subscription to the metrics and documents.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+ Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+ name and invariant version. Default value is None.
+ :type monitoring_data_point: IO[bytes]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+ machine name with instance name as a backup. Default value is None.
+ :paramtype machine_name: str
+ :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+ Metrics uses machine name with instance name as a backup. Default value is None.
+ :paramtype instance_name: str
+ :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+ and documents. Default value is None.
+ :paramtype stream_id: str
+ :keyword role_name: Cloud role name of the service. Default value is None.
+ :paramtype role_name: str
+ :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+ between the client and Live Metrics. Default value is None.
+ :paramtype invariant_version: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ # @distributed_trace_async
+ async def is_subscribed(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_point: Optional[Union[_models.MonitoringDataPoint, IO[bytes]]] = None,
+ *,
+ ikey: str,
+ transmission_time: Optional[int] = None,
+ machine_name: Optional[str] = None,
+ instance_name: Optional[str] = None,
+ stream_id: Optional[str] = None,
+ role_name: Optional[str] = None,
+ invariant_version: Optional[str] = None,
+ configuration_etag: Optional[str] = None,
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Determine whether there is any subscription to the metrics and documents.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_point: Data contract between Application Insights client SDK and Live
+ Metrics. /QuickPulseService.svc/ping uses this as a backup source of machine name, instance
+         name and invariant version. Is either a MonitoringDataPoint type or an IO[bytes] type. Default
+ value is None.
+ :type monitoring_data_point: ~quickpulse_client.models.MonitoringDataPoint or IO[bytes]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword machine_name: Computer name where Application Insights SDK lives. Live Metrics uses
+ machine name with instance name as a backup. Default value is None.
+ :paramtype machine_name: str
+ :keyword instance_name: Service instance name where Application Insights SDK lives. Live
+ Metrics uses machine name with instance name as a backup. Default value is None.
+ :paramtype instance_name: str
+ :keyword stream_id: Identifies an Application Insights SDK as trusted agent to report metrics
+ and documents. Default value is None.
+ :paramtype stream_id: str
+ :keyword role_name: Cloud role name of the service. Default value is None.
+ :paramtype role_name: str
+ :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+ between the client and Live Metrics. Default value is None.
+ :paramtype invariant_version: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.CollectionConfigurationInfo] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(monitoring_data_point, (IOBase, bytes)):
+ _content = monitoring_data_point
+ else:
+ if monitoring_data_point is not None:
+ _json = self._serialize.body(monitoring_data_point, "MonitoringDataPoint")
+ else:
+ _json = None
+
+ _request = build_quickpulse_is_subscribed_request(
+ ikey=ikey,
+ transmission_time=transmission_time,
+ machine_name=machine_name,
+ instance_name=instance_name,
+ stream_id=stream_id,
+ role_name=role_name,
+ invariant_version=invariant_version,
+ configuration_etag=configuration_etag,
+ content_type=content_type,
+ api_version=self._config.api_version,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("endpoint", endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ await response.read() # Load the body in memory and close the socket
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ServiceError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-qps-configuration-etag"] = self._deserialize(
+ "str", response.headers.get("x-ms-qps-configuration-etag")
+ )
+ response_headers["x-ms-qps-service-endpoint-redirect-v2"] = self._deserialize(
+ "str", response.headers.get("x-ms-qps-service-endpoint-redirect-v2")
+ )
+ response_headers["x-ms-qps-service-polling-interval-hint"] = self._deserialize(
+ "str", response.headers.get("x-ms-qps-service-polling-interval-hint")
+ )
+ response_headers["x-ms-qps-subscribed"] = self._deserialize("str", response.headers.get("x-ms-qps-subscribed"))
+
+ deserialized = self._deserialize("CollectionConfigurationInfo", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
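+    # Usage sketch (illustrative, not part of the generated module) with
+    # hypothetical values: `point` is a MonitoringDataPoint built by the
+    # caller; the returned configuration carries the etag to echo back.
+    #
+    #   >>> info = await client.is_subscribed(
+    #   ...     ikey="00000000-0000-0000-0000-000000000000",
+    #   ...     monitoring_data_point=point,
+    #   ... )
+    #   >>> etag = info.e_tag
+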
+ @overload
+ async def publish(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_points: Optional[List[_models.MonitoringDataPoint]] = None,
+ *,
+ ikey: str,
+ configuration_etag: Optional[str] = None,
+ transmission_time: Optional[int] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Publish live metrics to the Live Metrics service when there is an active subscription to the
+ metrics.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_points: Data contract between the client and Live Metrics.
+ /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+ invariant version. Default value is None.
+ :type monitoring_data_points: list[~quickpulse_client.models.MonitoringDataPoint]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def publish(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_points: Optional[IO[bytes]] = None,
+ *,
+ ikey: str,
+ configuration_etag: Optional[str] = None,
+ transmission_time: Optional[int] = None,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Publish live metrics to the Live Metrics service when there is an active subscription to the
+ metrics.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_points: Data contract between the client and Live Metrics.
+ /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+ invariant version. Default value is None.
+ :type monitoring_data_points: IO[bytes]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ # @distributed_trace_async
+ async def publish(
+ self,
+ endpoint: str = "https://global.livediagnostics.monitor.azure.com",
+ monitoring_data_points: Optional[Union[List[_models.MonitoringDataPoint], IO[bytes]]] = None,
+ *,
+ ikey: str,
+ configuration_etag: Optional[str] = None,
+ transmission_time: Optional[int] = None,
+ **kwargs: Any
+ ) -> _models.CollectionConfigurationInfo:
+ """Publish live metrics to the Live Metrics service when there is an active subscription to the
+ metrics.
+
+ :param endpoint: The endpoint of the Live Metrics service. Default value is
+ "https://global.livediagnostics.monitor.azure.com".
+ :type endpoint: str
+ :param monitoring_data_points: Data contract between the client and Live Metrics.
+ /QuickPulseService.svc/ping uses this as a backup source of machine name, instance name and
+         invariant version. Is either a [MonitoringDataPoint] type or an IO[bytes] type. Default value is
+ None.
+ :type monitoring_data_points: list[~quickpulse_client.models.MonitoringDataPoint] or IO[bytes]
+ :keyword ikey: The instrumentation key of the target Application Insights component for which
+ the client checks whether there's any subscription to it. Required.
+ :paramtype ikey: str
+ :keyword configuration_etag: An encoded string that indicates whether the collection
+ configuration is changed. Default value is None.
+ :paramtype configuration_etag: str
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+         Live Metrics. An 8-byte integer of ticks. Default value is None.
+ :paramtype transmission_time: int
+ :return: CollectionConfigurationInfo
+ :rtype: ~quickpulse_client.models.CollectionConfigurationInfo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.CollectionConfigurationInfo] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(monitoring_data_points, (IOBase, bytes)):
+ _content = monitoring_data_points
+ else:
+ if monitoring_data_points is not None:
+ _json = self._serialize.body(monitoring_data_points, "[MonitoringDataPoint]")
+ else:
+ _json = None
+
+ _request = build_quickpulse_publish_request(
+ ikey=ikey,
+ configuration_etag=configuration_etag,
+ transmission_time=transmission_time,
+ content_type=content_type,
+ api_version=self._config.api_version,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ path_format_arguments = {
+ "endpoint": self._serialize.url("endpoint", endpoint, "str", skip_quote=True),
+ }
+ _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ if _stream:
+ await response.read() # Load the body in memory and close the socket
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ServiceError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-qps-configuration-etag"] = self._deserialize(
+ "str", response.headers.get("x-ms-qps-configuration-etag")
+ )
+ response_headers["x-ms-qps-subscribed"] = self._deserialize("str", response.headers.get("x-ms-qps-subscribed"))
+
+ deserialized = self._deserialize("CollectionConfigurationInfo", pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
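+
+    # Usage sketch (illustrative, not part of the generated module) with
+    # hypothetical values, echoing the etag obtained from is_subscribed so
+    # the service can detect configuration drift.
+    #
+    #   >>> info = await client.publish(
+    #   ...     ikey="00000000-0000-0000-0000-000000000000",
+    #   ...     monitoring_data_points=[point],
+    #   ...     configuration_etag=etag,
+    #   ... )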
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_operations/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_vendor.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_vendor.py
new file mode 100644
index 00000000..40b0a738
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/aio/_vendor.py
@@ -0,0 +1,26 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from abc import ABC
+from typing import TYPE_CHECKING
+
+from ._configuration import QuickpulseClientConfiguration
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core import AsyncPipelineClient
+
+ from .._serialization import Deserializer, Serializer
+
+
+class QuickpulseClientMixinABC(ABC):
+ """DO NOT use this class. It is for internal typing use only."""
+
+ _client: "AsyncPipelineClient"
+ _config: QuickpulseClientConfiguration
+ _serialize: "Serializer"
+ _deserialize: "Deserializer"
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/__init__.py
new file mode 100644
index 00000000..f9d7183a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/__init__.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._models import CollectionConfigurationError
+from ._models import CollectionConfigurationInfo
+from ._models import DerivedMetricInfo
+from ._models import DocumentFilterConjunctionGroupInfo
+from ._models import DocumentIngress
+from ._models import DocumentStreamInfo
+from ._models import Event
+from ._models import Exception
+from ._models import FilterConjunctionGroupInfo
+from ._models import FilterInfo
+from ._models import KeyValuePairString
+from ._models import MetricPoint
+from ._models import MonitoringDataPoint
+from ._models import ProcessCpuData
+from ._models import QuotaConfigurationInfo
+from ._models import RemoteDependency
+from ._models import Request
+from ._models import ServiceError
+from ._models import Trace
+
+from ._enums import AggregationType
+from ._enums import CollectionConfigurationErrorType
+from ._enums import DocumentType
+from ._enums import PredicateType
+from ._enums import TelemetryType
+from ._patch import __all__ as _patch_all
+from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "CollectionConfigurationError",
+ "CollectionConfigurationInfo",
+ "DerivedMetricInfo",
+ "DocumentFilterConjunctionGroupInfo",
+ "DocumentIngress",
+ "DocumentStreamInfo",
+ "Event",
+ "Exception",
+ "FilterConjunctionGroupInfo",
+ "FilterInfo",
+ "KeyValuePairString",
+ "MetricPoint",
+ "MonitoringDataPoint",
+ "ProcessCpuData",
+ "QuotaConfigurationInfo",
+ "RemoteDependency",
+ "Request",
+ "ServiceError",
+ "Trace",
+ "AggregationType",
+ "CollectionConfigurationErrorType",
+ "DocumentType",
+ "PredicateType",
+ "TelemetryType",
+]
+__all__.extend([p for p in _patch_all if p not in __all__])
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_enums.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_enums.py
new file mode 100644
index 00000000..c39af21f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_enums.py
@@ -0,0 +1,111 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class AggregationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Aggregation type."""
+
+ AVG = "Avg"
+ """Average"""
+ SUM = "Sum"
+ """Sum"""
+ MIN = "Min"
+ """Minimum"""
+ MAX = "Max"
+ """Maximum"""
+
+
+class CollectionConfigurationErrorType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Collection configuration error type reported by the client SDK."""
+
+ UNKNOWN = "Unknown"
+ """Unknown error type."""
+ PERFORMANCE_COUNTER_PARSING = "PerformanceCounterParsing"
+ """Performance counter parsing error."""
+ PERFORMANCE_COUNTER_UNEXPECTED = "PerformanceCounterUnexpected"
+ """Performance counter unexpected error."""
+ PERFORMANCE_COUNTER_DUPLICATE_IDS = "PerformanceCounterDuplicateIds"
+ """Performance counter duplicate ids."""
+ DOCUMENT_STREAM_DUPLICATE_IDS = "DocumentStreamDuplicateIds"
+ """Document stream duplication ids."""
+ DOCUMENT_STREAM_FAILURE_TO_CREATE = "DocumentStreamFailureToCreate"
+ """Document stream failed to create."""
+ DOCUMENT_STREAM_FAILURE_TO_CREATE_FILTER_UNEXPECTED = "DocumentStreamFailureToCreateFilterUnexpected"
+ """Document stream failed to create filter unexpectedly."""
+ METRIC_DUPLICATE_IDS = "MetricDuplicateIds"
+ """Metric duplicate ids."""
+ METRIC_TELEMETRY_TYPE_UNSUPPORTED = "MetricTelemetryTypeUnsupported"
+ """Metric telemetry type unsupported."""
+ METRIC_FAILURE_TO_CREATE = "MetricFailureToCreate"
+ """Metric failed to create."""
+ METRIC_FAILURE_TO_CREATE_FILTER_UNEXPECTED = "MetricFailureToCreateFilterUnexpected"
+ """Metric failed to create filter unexpectedly."""
+ FILTER_FAILURE_TO_CREATE_UNEXPECTED = "FilterFailureToCreateUnexpected"
+ """Filter failed to create unexpectedly."""
+ COLLECTION_CONFIGURATION_FAILURE_TO_CREATE_UNEXPECTED = "CollectionConfigurationFailureToCreateUnexpected"
+ """Collection configuration failed to create unexpectedly."""
+
+
+class DocumentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Document type."""
+
+ REQUEST = "Request"
+ """Represents a request telemetry type."""
+ REMOTE_DEPENDENCY = "RemoteDependency"
+ """Represents a remote dependency telemetry type."""
+ EXCEPTION = "Exception"
+ """Represents an exception telemetry type."""
+ EVENT = "Event"
+ """Represents an event telemetry type."""
+ TRACE = "Trace"
+ """Represents a trace telemetry type."""
+ UNKNOWN = "Unknown"
+ """Represents an unknown telemetry type."""
+
+
+class PredicateType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Enum representing the different types of predicates."""
+
+ EQUAL = "Equal"
+ """Represents an equality predicate."""
+ NOT_EQUAL = "NotEqual"
+ """Represents a not-equal predicate."""
+ LESS_THAN = "LessThan"
+ """Represents a less-than predicate."""
+ GREATER_THAN = "GreaterThan"
+ """Represents a greater-than predicate."""
+ LESS_THAN_OR_EQUAL = "LessThanOrEqual"
+ """Represents a less-than-or-equal predicate."""
+ GREATER_THAN_OR_EQUAL = "GreaterThanOrEqual"
+ """Represents a greater-than-or-equal predicate."""
+ CONTAINS = "Contains"
+ """Represents a contains predicate."""
+ DOES_NOT_CONTAIN = "DoesNotContain"
+ """Represents a does-not-contain predicate."""
+
+
+class TelemetryType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Telemetry type."""
+
+ REQUEST = "Request"
+ """Represents a request telemetry type."""
+ DEPENDENCY = "Dependency"
+ """Represents a dependency telemetry type."""
+ EXCEPTION = "Exception"
+ """Represents an exception telemetry type."""
+ EVENT = "Event"
+ """Represents an event telemetry type."""
+ METRIC = "Metric"
+ """Represents a metric telemetry type."""
+ PERFORMANCE_COUNTER = "PerformanceCounter"
+ """Represents a performance counter telemetry type."""
+ TRACE = "Trace"
+ """Represents a trace telemetry type."""
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_models.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_models.py
new file mode 100644
index 00000000..8e22c899
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_models.py
@@ -0,0 +1,1123 @@
+# coding=utf-8
+# pylint: disable=too-many-lines
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Any, List, Optional, TYPE_CHECKING, Union
+
+from .. import _serialization
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from .. import models as _models
+
+
+class CollectionConfigurationError(_serialization.Model):
+ """Represents an error while SDK parses and applies an instance of CollectionConfigurationInfo.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar collection_configuration_error_type: Error type. Required. Known values are: "Unknown",
+ "PerformanceCounterParsing", "PerformanceCounterUnexpected", "PerformanceCounterDuplicateIds",
+ "DocumentStreamDuplicateIds", "DocumentStreamFailureToCreate",
+ "DocumentStreamFailureToCreateFilterUnexpected", "MetricDuplicateIds",
+ "MetricTelemetryTypeUnsupported", "MetricFailureToCreate",
+ "MetricFailureToCreateFilterUnexpected", "FilterFailureToCreateUnexpected", and
+ "CollectionConfigurationFailureToCreateUnexpected".
+ :vartype collection_configuration_error_type: str or
+ ~quickpulse_client.models.CollectionConfigurationErrorType
+ :ivar message: Error message. Required.
+ :vartype message: str
+ :ivar full_exception: Exception that led to the creation of the configuration error. Required.
+ :vartype full_exception: str
+ :ivar data: Custom properties to add more information to the error. Required.
+ :vartype data: list[~quickpulse_client.models.KeyValuePairString]
+ """
+
+ _validation = {
+ "collection_configuration_error_type": {"required": True},
+ "message": {"required": True},
+ "full_exception": {"required": True},
+ "data": {"required": True},
+ }
+
+ _attribute_map = {
+ "collection_configuration_error_type": {"key": "CollectionConfigurationErrorType", "type": "str"},
+ "message": {"key": "Message", "type": "str"},
+ "full_exception": {"key": "FullException", "type": "str"},
+ "data": {"key": "Data", "type": "[KeyValuePairString]"},
+ }
+
+ def __init__(
+ self,
+ *,
+ collection_configuration_error_type: Union[str, "_models.CollectionConfigurationErrorType"],
+ message: str,
+ full_exception: str,
+ data: List["_models.KeyValuePairString"],
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword collection_configuration_error_type: Error type. Required. Known values are:
+ "Unknown", "PerformanceCounterParsing", "PerformanceCounterUnexpected",
+ "PerformanceCounterDuplicateIds", "DocumentStreamDuplicateIds",
+ "DocumentStreamFailureToCreate", "DocumentStreamFailureToCreateFilterUnexpected",
+ "MetricDuplicateIds", "MetricTelemetryTypeUnsupported", "MetricFailureToCreate",
+ "MetricFailureToCreateFilterUnexpected", "FilterFailureToCreateUnexpected", and
+ "CollectionConfigurationFailureToCreateUnexpected".
+ :paramtype collection_configuration_error_type: str or
+ ~quickpulse_client.models.CollectionConfigurationErrorType
+ :keyword message: Error message. Required.
+ :paramtype message: str
+ :keyword full_exception: Exception that led to the creation of the configuration error.
+ Required.
+ :paramtype full_exception: str
+ :keyword data: Custom properties to add more information to the error. Required.
+ :paramtype data: list[~quickpulse_client.models.KeyValuePairString]
+ """
+ super().__init__(**kwargs)
+ self.collection_configuration_error_type = collection_configuration_error_type
+ self.message = message
+ self.full_exception = full_exception
+ self.data = data
+
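+
+def _example_collection_configuration_error() -> "CollectionConfigurationError":
+    # Editor's illustrative sketch, not generated code: shows how the four required
+    # fields are populated when reporting a config-parse failure back to Live
+    # Metrics. All values below are hypothetical placeholders.
+    return CollectionConfigurationError(
+        collection_configuration_error_type="MetricDuplicateIds",
+        message="Duplicate derived metric id encountered",
+        full_exception="Traceback (most recent call last): ...",
+        data=[KeyValuePairString(key="MetricId", value="metric-1")],
+    )
+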
+
+class CollectionConfigurationInfo(_serialization.Model):
+ """Represents the collection configuration - a customizable description of performance counters,
+ metrics, and full telemetry documents to be collected by the client SDK.
+
+ All required parameters must be populated in order to send to server.
+
+    :ivar e_tag: An encoded string that indicates whether the collection configuration has changed.
+ Required.
+ :vartype e_tag: str
+ :ivar metrics: An array of metric configuration info. Required.
+ :vartype metrics: list[~quickpulse_client.models.DerivedMetricInfo]
+ :ivar document_streams: An array of document stream configuration info. Required.
+ :vartype document_streams: list[~quickpulse_client.models.DocumentStreamInfo]
+ :ivar quota_info: Controls document quotas to be sent to Live Metrics.
+ :vartype quota_info: ~quickpulse_client.models.QuotaConfigurationInfo
+ """
+
+ _validation = {
+ "e_tag": {"required": True},
+ "metrics": {"required": True},
+ "document_streams": {"required": True},
+ }
+
+ _attribute_map = {
+ "e_tag": {"key": "ETag", "type": "str"},
+ "metrics": {"key": "Metrics", "type": "[DerivedMetricInfo]"},
+ "document_streams": {"key": "DocumentStreams", "type": "[DocumentStreamInfo]"},
+ "quota_info": {"key": "QuotaInfo", "type": "QuotaConfigurationInfo"},
+ }
+
+ def __init__(
+ self,
+ *,
+ e_tag: str,
+ metrics: List["_models.DerivedMetricInfo"],
+ document_streams: List["_models.DocumentStreamInfo"],
+ quota_info: Optional["_models.QuotaConfigurationInfo"] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+        :keyword e_tag: An encoded string that indicates whether the collection configuration has
+ changed. Required.
+ :paramtype e_tag: str
+ :keyword metrics: An array of metric configuration info. Required.
+ :paramtype metrics: list[~quickpulse_client.models.DerivedMetricInfo]
+ :keyword document_streams: An array of document stream configuration info. Required.
+ :paramtype document_streams: list[~quickpulse_client.models.DocumentStreamInfo]
+ :keyword quota_info: Controls document quotas to be sent to Live Metrics.
+ :paramtype quota_info: ~quickpulse_client.models.QuotaConfigurationInfo
+ """
+ super().__init__(**kwargs)
+ self.e_tag = e_tag
+ self.metrics = metrics
+ self.document_streams = document_streams
+ self.quota_info = quota_info
+
+
+class DerivedMetricInfo(_serialization.Model):
+ """A metric configuration set by UX to scope the metrics it's interested in.
+
+ All required parameters must be populated in order to send to server.
+
+    :ivar id: Metric configuration identifier. Required.
+ :vartype id: str
+ :ivar telemetry_type: Telemetry type. Required.
+ :vartype telemetry_type: str
+ :ivar filter_groups: A collection of filters to scope metrics that UX needs. Required.
+ :vartype filter_groups: list[~quickpulse_client.models.FilterConjunctionGroupInfo]
+ :ivar projection: Telemetry's metric dimension whose value is to be aggregated. Example values:
+ Duration, Count(),... Required.
+ :vartype projection: str
+ :ivar aggregation: Aggregation type. This is the aggregation done from everything within a
+ single server. Required. Known values are: "Avg", "Sum", "Min", and "Max".
+ :vartype aggregation: str or ~quickpulse_client.models.AggregationType
+ :ivar back_end_aggregation: Aggregation type. This Aggregation is done across the values for
+ all the servers taken together. Required. Known values are: "Avg", "Sum", "Min", and "Max".
+ :vartype back_end_aggregation: str or ~quickpulse_client.models.AggregationType
+ """
+
+ _validation = {
+ "id": {"required": True},
+ "telemetry_type": {"required": True},
+ "filter_groups": {"required": True},
+ "projection": {"required": True},
+ "aggregation": {"required": True},
+ "back_end_aggregation": {"required": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "Id", "type": "str"},
+ "telemetry_type": {"key": "TelemetryType", "type": "str"},
+ "filter_groups": {"key": "FilterGroups", "type": "[FilterConjunctionGroupInfo]"},
+ "projection": {"key": "Projection", "type": "str"},
+ "aggregation": {"key": "Aggregation", "type": "str"},
+ "back_end_aggregation": {"key": "BackEndAggregation", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ telemetry_type: str,
+ filter_groups: List["_models.FilterConjunctionGroupInfo"],
+ projection: str,
+ aggregation: Union[str, "_models.AggregationType"],
+ back_end_aggregation: Union[str, "_models.AggregationType"],
+ **kwargs: Any
+ ) -> None:
+ """
+        :keyword id: Metric configuration identifier. Required.
+ :paramtype id: str
+ :keyword telemetry_type: Telemetry type. Required.
+ :paramtype telemetry_type: str
+ :keyword filter_groups: A collection of filters to scope metrics that UX needs. Required.
+ :paramtype filter_groups: list[~quickpulse_client.models.FilterConjunctionGroupInfo]
+ :keyword projection: Telemetry's metric dimension whose value is to be aggregated. Example
+ values: Duration, Count(),... Required.
+ :paramtype projection: str
+ :keyword aggregation: Aggregation type. This is the aggregation done from everything within a
+ single server. Required. Known values are: "Avg", "Sum", "Min", and "Max".
+ :paramtype aggregation: str or ~quickpulse_client.models.AggregationType
+ :keyword back_end_aggregation: Aggregation type. This Aggregation is done across the values for
+ all the servers taken together. Required. Known values are: "Avg", "Sum", "Min", and "Max".
+ :paramtype back_end_aggregation: str or ~quickpulse_client.models.AggregationType
+ """
+ super().__init__(**kwargs)
+ self.id = id
+ self.telemetry_type = telemetry_type
+ self.filter_groups = filter_groups
+ self.projection = projection
+ self.aggregation = aggregation
+ self.back_end_aggregation = back_end_aggregation
+
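+
+def _example_derived_metric_info() -> "DerivedMetricInfo":
+    # Editor's illustrative sketch, not generated code: a derived metric that
+    # averages request Duration, scoped to requests slower than 1000 ms via one
+    # AND-connected filter group. All ids and values are hypothetical.
+    return DerivedMetricInfo(
+        id="metric-1",
+        telemetry_type="Request",
+        filter_groups=[
+            FilterConjunctionGroupInfo(
+                filters=[FilterInfo(field_name="Duration", predicate="GreaterThan", comparand="1000")]
+            )
+        ],
+        projection="Duration",
+        aggregation="Avg",
+        back_end_aggregation="Avg",
+    )
+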
+
+class DocumentFilterConjunctionGroupInfo(_serialization.Model):
+ """A collection of filters for a specific telemetry type.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar telemetry_type: Telemetry type. Required. Known values are: "Request", "Dependency",
+ "Exception", "Event", "Metric", "PerformanceCounter", and "Trace".
+ :vartype telemetry_type: str or ~quickpulse_client.models.TelemetryType
+    :ivar filters: An AND-connected group of filters. Required.
+ :vartype filters: ~quickpulse_client.models.FilterConjunctionGroupInfo
+ """
+
+ _validation = {
+ "telemetry_type": {"required": True},
+ "filters": {"required": True},
+ }
+
+ _attribute_map = {
+ "telemetry_type": {"key": "TelemetryType", "type": "str"},
+ "filters": {"key": "Filters", "type": "FilterConjunctionGroupInfo"},
+ }
+
+ def __init__(
+ self,
+ *,
+ telemetry_type: Union[str, "_models.TelemetryType"],
+ filters: "_models.FilterConjunctionGroupInfo",
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword telemetry_type: Telemetry type. Required. Known values are: "Request", "Dependency",
+ "Exception", "Event", "Metric", "PerformanceCounter", and "Trace".
+ :paramtype telemetry_type: str or ~quickpulse_client.models.TelemetryType
+        :keyword filters: An AND-connected group of filters. Required.
+ :paramtype filters: ~quickpulse_client.models.FilterConjunctionGroupInfo
+ """
+ super().__init__(**kwargs)
+ self.telemetry_type = telemetry_type
+ self.filters = filters
+
+
+class DocumentIngress(_serialization.Model):
+ """Base class of the specific document types.
+
+ You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+ Event, Exception, RemoteDependency, Request, Trace
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+ 'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+ "Event", "Trace", and "Unknown".
+ :vartype document_type: str or ~quickpulse_client.models.DocumentType
+ :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :vartype document_stream_ids: list[str]
+ :ivar properties: Collection of custom properties.
+ :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+ """
+
+ _validation = {
+ "document_type": {"required": True},
+ }
+
+ _attribute_map = {
+ "document_type": {"key": "DocumentType", "type": "str"},
+ "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+ "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+ }
+
+ _subtype_map = {
+ "document_type": {
+ "Event": "Event",
+ "Exception": "Exception",
+ "RemoteDependency": "RemoteDependency",
+ "Request": "Request",
+ "Trace": "Trace",
+ }
+ }
+
+ def __init__(
+ self,
+ *,
+ document_stream_ids: Optional[List[str]] = None,
+ properties: Optional[List["_models.KeyValuePairString"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :paramtype document_stream_ids: list[str]
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+ """
+ super().__init__(**kwargs)
+ self.document_type: Optional[str] = None
+ self.document_stream_ids = document_stream_ids
+ self.properties = properties
+
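+
+def _example_polymorphic_deserialization() -> "DocumentIngress":
+    # Editor's illustrative sketch, not generated code: _subtype_map lets the base
+    # class dispatch on the DocumentType discriminator, so deserializing through
+    # DocumentIngress yields the concrete subtype (here, Event). This assumes the
+    # msrest-style Model.deserialize helper from _serialization; the payload is a
+    # hypothetical wire-format dict using the PascalCase keys from _attribute_map.
+    return DocumentIngress.deserialize({"DocumentType": "Event", "Name": "login"})
+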
+
+class DocumentStreamInfo(_serialization.Model):
+ """Configurations/filters set by UX to scope the document/telemetry it's interested in.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar id: Identifier of the document stream initiated by a UX. Required.
+ :vartype id: str
+ :ivar document_filter_groups: Gets or sets an OR-connected collection of filter groups.
+ Required.
+ :vartype document_filter_groups:
+ list[~quickpulse_client.models.DocumentFilterConjunctionGroupInfo]
+ """
+
+ _validation = {
+ "id": {"required": True},
+ "document_filter_groups": {"required": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "Id", "type": "str"},
+ "document_filter_groups": {"key": "DocumentFilterGroups", "type": "[DocumentFilterConjunctionGroupInfo]"},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ document_filter_groups: List["_models.DocumentFilterConjunctionGroupInfo"],
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword id: Identifier of the document stream initiated by a UX. Required.
+ :paramtype id: str
+ :keyword document_filter_groups: Gets or sets an OR-connected collection of filter groups.
+ Required.
+ :paramtype document_filter_groups:
+ list[~quickpulse_client.models.DocumentFilterConjunctionGroupInfo]
+ """
+ super().__init__(**kwargs)
+ self.id = id
+ self.document_filter_groups = document_filter_groups
+
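+
+def _example_document_stream_info() -> "DocumentStreamInfo":
+    # Editor's illustrative sketch, not generated code: document_filter_groups is
+    # OR-connected, while the filters inside each FilterConjunctionGroupInfo are
+    # AND-connected. The stream id and filter values are hypothetical.
+    return DocumentStreamInfo(
+        id="stream-1",
+        document_filter_groups=[
+            DocumentFilterConjunctionGroupInfo(
+                telemetry_type="Request",
+                filters=FilterConjunctionGroupInfo(
+                    filters=[FilterInfo(field_name="Success", predicate="Equal", comparand="false")]
+                ),
+            )
+        ],
+    )
+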
+
+class Event(DocumentIngress):
+ """Event document type.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+ 'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+ "Event", "Trace", and "Unknown".
+ :vartype document_type: str or ~quickpulse_client.models.DocumentType
+ :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :vartype document_stream_ids: list[str]
+ :ivar properties: Collection of custom properties.
+ :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :ivar name: Event name.
+ :vartype name: str
+ """
+
+ _validation = {
+ "document_type": {"required": True},
+ "name": {"max_length": 512},
+ }
+
+ _attribute_map = {
+ "document_type": {"key": "DocumentType", "type": "str"},
+ "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+ "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+ "name": {"key": "Name", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ document_stream_ids: Optional[List[str]] = None,
+ properties: Optional[List["_models.KeyValuePairString"]] = None,
+ name: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :paramtype document_stream_ids: list[str]
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :keyword name: Event name.
+ :paramtype name: str
+ """
+ super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+ self.document_type: str = "Event"
+ self.name = name
+
+
+class Exception(DocumentIngress):
+ """Exception document type.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+ 'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+ "Event", "Trace", and "Unknown".
+ :vartype document_type: str or ~quickpulse_client.models.DocumentType
+ :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :vartype document_stream_ids: list[str]
+ :ivar properties: Collection of custom properties.
+ :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :ivar exception_type: Exception type name.
+ :vartype exception_type: str
+ :ivar exception_message: Exception message.
+ :vartype exception_message: str
+ """
+
+ _validation = {
+ "document_type": {"required": True},
+ "exception_type": {"max_length": 1024},
+ "exception_message": {"max_length": 32768},
+ }
+
+ _attribute_map = {
+ "document_type": {"key": "DocumentType", "type": "str"},
+ "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+ "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+ "exception_type": {"key": "ExceptionType", "type": "str"},
+ "exception_message": {"key": "ExceptionMessage", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ document_stream_ids: Optional[List[str]] = None,
+ properties: Optional[List["_models.KeyValuePairString"]] = None,
+ exception_type: Optional[str] = None,
+ exception_message: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :paramtype document_stream_ids: list[str]
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :keyword exception_type: Exception type name.
+ :paramtype exception_type: str
+ :keyword exception_message: Exception message.
+ :paramtype exception_message: str
+ """
+ super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+ self.document_type: str = "Exception"
+ self.exception_type = exception_type
+ self.exception_message = exception_message
+
+
+class FilterConjunctionGroupInfo(_serialization.Model):
+ """An AND-connected group of FilterInfo objects.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar filters: An array of filters. Required.
+ :vartype filters: list[~quickpulse_client.models.FilterInfo]
+ """
+
+ _validation = {
+ "filters": {"required": True},
+ }
+
+ _attribute_map = {
+ "filters": {"key": "Filters", "type": "[FilterInfo]"},
+ }
+
+ def __init__(self, *, filters: List["_models.FilterInfo"], **kwargs: Any) -> None:
+ """
+ :keyword filters: An array of filters. Required.
+ :paramtype filters: list[~quickpulse_client.models.FilterInfo]
+ """
+ super().__init__(**kwargs)
+ self.filters = filters
+
+
+class FilterInfo(_serialization.Model):
+ """A filter set on UX.
+
+ All required parameters must be populated in order to send to server.
+
+    :ivar field_name: Dimension name of the filter. Required.
+ :vartype field_name: str
+ :ivar predicate: Operator of the filter. Required. Known values are: "Equal", "NotEqual",
+ "LessThan", "GreaterThan", "LessThanOrEqual", "GreaterThanOrEqual", "Contains", and
+ "DoesNotContain".
+ :vartype predicate: str or ~quickpulse_client.models.PredicateType
+ :ivar comparand: Comparand of the filter. Required.
+ :vartype comparand: str
+ """
+
+ _validation = {
+ "field_name": {"required": True},
+ "predicate": {"required": True},
+ "comparand": {"required": True},
+ }
+
+ _attribute_map = {
+ "field_name": {"key": "FieldName", "type": "str"},
+ "predicate": {"key": "Predicate", "type": "str"},
+ "comparand": {"key": "Comparand", "type": "str"},
+ }
+
+ def __init__(
+ self, *, field_name: str, predicate: Union[str, "_models.PredicateType"], comparand: str, **kwargs: Any
+ ) -> None:
+ """
+        :keyword field_name: Dimension name of the filter. Required.
+ :paramtype field_name: str
+ :keyword predicate: Operator of the filter. Required. Known values are: "Equal", "NotEqual",
+ "LessThan", "GreaterThan", "LessThanOrEqual", "GreaterThanOrEqual", "Contains", and
+ "DoesNotContain".
+ :paramtype predicate: str or ~quickpulse_client.models.PredicateType
+ :keyword comparand: Comparand of the filter. Required.
+ :paramtype comparand: str
+ """
+ super().__init__(**kwargs)
+ self.field_name = field_name
+ self.predicate = predicate
+ self.comparand = comparand
+
+
+class KeyValuePairString(_serialization.Model):
+ """Key-value pair of string and string.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar key: Key of the key-value pair. Required.
+ :vartype key: str
+ :ivar value: Value of the key-value pair. Required.
+ :vartype value: str
+ """
+
+ _validation = {
+ "key": {"required": True},
+ "value": {"required": True},
+ }
+
+ _attribute_map = {
+ "key": {"key": "key", "type": "str"},
+ "value": {"key": "value", "type": "str"},
+ }
+
+ def __init__(self, *, key: str, value: str, **kwargs: Any) -> None:
+ """
+ :keyword key: Key of the key-value pair. Required.
+ :paramtype key: str
+ :keyword value: Value of the key-value pair. Required.
+ :paramtype value: str
+ """
+ super().__init__(**kwargs)
+ self.key = key
+ self.value = value
+
+
+class MetricPoint(_serialization.Model):
+ """Metric data point.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar name: Metric name. Required.
+ :vartype name: str
+ :ivar value: Metric value. Required.
+ :vartype value: float
+ :ivar weight: Metric weight. Required.
+ :vartype weight: int
+ """
+
+ _validation = {
+ "name": {"required": True},
+ "value": {"required": True},
+ "weight": {"required": True},
+ }
+
+ _attribute_map = {
+ "name": {"key": "Name", "type": "str"},
+ "value": {"key": "Value", "type": "float"},
+ "weight": {"key": "Weight", "type": "int"},
+ }
+
+ def __init__(self, *, name: str, value: float, weight: int, **kwargs: Any) -> None:
+ """
+ :keyword name: Metric name. Required.
+ :paramtype name: str
+ :keyword value: Metric value. Required.
+ :paramtype value: float
+ :keyword weight: Metric weight. Required.
+ :paramtype weight: int
+ """
+ super().__init__(**kwargs)
+ self.name = name
+ self.value = value
+ self.weight = weight
+
+
+class MonitoringDataPoint(_serialization.Model): # pylint: disable=too-many-instance-attributes
+ """Monitoring data point coming from the client, which includes metrics, documents and other
+ metadata info.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar version: Application Insights SDK version. Required.
+ :vartype version: str
+ :ivar invariant_version: Version/generation of the data contract (MonitoringDataPoint) between
+ SDK and Live Metrics. Required.
+ :vartype invariant_version: int
+ :ivar instance: Service instance name where Application Insights SDK lives. Required.
+ :vartype instance: str
+ :ivar role_name: Service role name. Required.
+ :vartype role_name: str
+ :ivar machine_name: Computer name where Application Insights SDK lives. Required.
+ :vartype machine_name: str
+ :ivar stream_id: Identifies an Application Insights SDK as a trusted agent to report metrics
+ and documents. Required.
+ :vartype stream_id: str
+ :ivar timestamp: Data point generation timestamp.
+ :vartype timestamp: ~datetime.datetime
+ :ivar transmission_time: Timestamp when the client transmits the metrics and documents to Live
+ Metrics.
+ :vartype transmission_time: ~datetime.datetime
+ :ivar is_web_app: True if the current application is an Azure Web App. Required.
+ :vartype is_web_app: bool
+ :ivar performance_collection_supported: True if performance counters collection is supported.
+ Required.
+ :vartype performance_collection_supported: bool
+ :ivar metrics: An array of metric data points.
+ :vartype metrics: list[~quickpulse_client.models.MetricPoint]
+ :ivar documents: An array of documents of a specific type {Request}, {RemoteDependency},
+ {Exception}, {Event}, or {Trace}.
+ :vartype documents: list[~quickpulse_client.models.DocumentIngress]
+    :ivar top_cpu_processes: An array of top CPU consumption data points.
+ :vartype top_cpu_processes: list[~quickpulse_client.models.ProcessCpuData]
+    :ivar collection_configuration_errors: An array of errors raised while the SDK parses and
+     applies the {CollectionConfigurationInfo} provided by Live Metrics.
+ :vartype collection_configuration_errors:
+ list[~quickpulse_client.models.CollectionConfigurationError]
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "invariant_version": {"required": True},
+ "instance": {"required": True},
+ "role_name": {"required": True},
+ "machine_name": {"required": True},
+ "stream_id": {"required": True},
+ "is_web_app": {"required": True},
+ "performance_collection_supported": {"required": True},
+ }
+
+ _attribute_map = {
+ "version": {"key": "Version", "type": "str"},
+ "invariant_version": {"key": "InvariantVersion", "type": "int"},
+ "instance": {"key": "Instance", "type": "str"},
+ "role_name": {"key": "RoleName", "type": "str"},
+ "machine_name": {"key": "MachineName", "type": "str"},
+ "stream_id": {"key": "StreamId", "type": "str"},
+ "timestamp": {"key": "Timestamp", "type": "iso-8601"},
+ "transmission_time": {"key": "TransmissionTime", "type": "iso-8601"},
+ "is_web_app": {"key": "IsWebApp", "type": "bool"},
+ "performance_collection_supported": {"key": "PerformanceCollectionSupported", "type": "bool"},
+ "metrics": {"key": "Metrics", "type": "[MetricPoint]"},
+ "documents": {"key": "Documents", "type": "[DocumentIngress]"},
+ "top_cpu_processes": {"key": "TopCpuProcesses", "type": "[ProcessCpuData]"},
+ "collection_configuration_errors": {
+ "key": "CollectionConfigurationErrors",
+ "type": "[CollectionConfigurationError]",
+ },
+ }
+
+ def __init__(
+ self,
+ *,
+ version: str,
+ invariant_version: int,
+ instance: str,
+ role_name: str,
+ machine_name: str,
+ stream_id: str,
+ is_web_app: bool,
+ performance_collection_supported: bool,
+ timestamp: Optional[datetime.datetime] = None,
+ transmission_time: Optional[datetime.datetime] = None,
+ metrics: Optional[List["_models.MetricPoint"]] = None,
+ documents: Optional[List["_models.DocumentIngress"]] = None,
+ top_cpu_processes: Optional[List["_models.ProcessCpuData"]] = None,
+ collection_configuration_errors: Optional[List["_models.CollectionConfigurationError"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword version: Application Insights SDK version. Required.
+ :paramtype version: str
+ :keyword invariant_version: Version/generation of the data contract (MonitoringDataPoint)
+ between SDK and Live Metrics. Required.
+ :paramtype invariant_version: int
+ :keyword instance: Service instance name where Application Insights SDK lives. Required.
+ :paramtype instance: str
+ :keyword role_name: Service role name. Required.
+ :paramtype role_name: str
+ :keyword machine_name: Computer name where Application Insights SDK lives. Required.
+ :paramtype machine_name: str
+ :keyword stream_id: Identifies an Application Insights SDK as a trusted agent to report metrics
+ and documents. Required.
+ :paramtype stream_id: str
+ :keyword timestamp: Data point generation timestamp.
+ :paramtype timestamp: ~datetime.datetime
+ :keyword transmission_time: Timestamp when the client transmits the metrics and documents to
+ Live Metrics.
+ :paramtype transmission_time: ~datetime.datetime
+ :keyword is_web_app: True if the current application is an Azure Web App. Required.
+ :paramtype is_web_app: bool
+ :keyword performance_collection_supported: True if performance counters collection is
+ supported. Required.
+ :paramtype performance_collection_supported: bool
+ :keyword metrics: An array of metric data points.
+ :paramtype metrics: list[~quickpulse_client.models.MetricPoint]
+ :keyword documents: An array of documents of a specific type {Request}, {RemoteDependency},
+ {Exception}, {Event}, or {Trace}.
+ :paramtype documents: list[~quickpulse_client.models.DocumentIngress]
+        :keyword top_cpu_processes: An array of top CPU consumption data points.
+ :paramtype top_cpu_processes: list[~quickpulse_client.models.ProcessCpuData]
+        :keyword collection_configuration_errors: An array of errors raised while the SDK parses
+         and applies the {CollectionConfigurationInfo} provided by Live Metrics.
+ :paramtype collection_configuration_errors:
+ list[~quickpulse_client.models.CollectionConfigurationError]
+ """
+ super().__init__(**kwargs)
+ self.version = version
+ self.invariant_version = invariant_version
+ self.instance = instance
+ self.role_name = role_name
+ self.machine_name = machine_name
+ self.stream_id = stream_id
+ self.timestamp = timestamp
+ self.transmission_time = transmission_time
+ self.is_web_app = is_web_app
+ self.performance_collection_supported = performance_collection_supported
+ self.metrics = metrics
+ self.documents = documents
+ self.top_cpu_processes = top_cpu_processes
+ self.collection_configuration_errors = collection_configuration_errors
+
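+
+def _example_monitoring_data_point() -> "MonitoringDataPoint":
+    # Editor's illustrative sketch, not generated code: assembles a minimal payload
+    # with one metric point; the exporter fills these fields from the environment
+    # at runtime. Every value below is a hypothetical placeholder.
+    return MonitoringDataPoint(
+        version="1.0.0",
+        invariant_version=5,
+        instance="instance-1",
+        role_name="my-service",
+        machine_name="host-1",
+        stream_id="00000000-0000-0000-0000-000000000000",
+        is_web_app=False,
+        performance_collection_supported=True,
+        metrics=[MetricPoint(name="requests/sec", value=5.0, weight=1)],
+    )
+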
+
+class ProcessCpuData(_serialization.Model):
+ """CPU consumption datapoint.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar process_name: Process name. Required.
+ :vartype process_name: str
+ :ivar cpu_percentage: CPU consumption percentage. Required.
+ :vartype cpu_percentage: int
+ """
+
+ _validation = {
+ "process_name": {"required": True},
+ "cpu_percentage": {"required": True},
+ }
+
+ _attribute_map = {
+ "process_name": {"key": "ProcessName", "type": "str"},
+ "cpu_percentage": {"key": "CpuPercentage", "type": "int"},
+ }
+
+ def __init__(self, *, process_name: str, cpu_percentage: int, **kwargs: Any) -> None:
+ """
+ :keyword process_name: Process name. Required.
+ :paramtype process_name: str
+ :keyword cpu_percentage: CPU consumption percentage. Required.
+ :paramtype cpu_percentage: int
+ """
+ super().__init__(**kwargs)
+ self.process_name = process_name
+ self.cpu_percentage = cpu_percentage
+
+
+class QuotaConfigurationInfo(_serialization.Model):
+ """Controls document quotas to be sent to Live Metrics.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar initial_quota: Initial quota.
+ :vartype initial_quota: float
+ :ivar max_quota: Max quota. Required.
+ :vartype max_quota: float
+ :ivar quota_accrual_rate_per_sec: Quota accrual rate per second. Required.
+ :vartype quota_accrual_rate_per_sec: float
+ """
+
+ _validation = {
+ "max_quota": {"required": True},
+ "quota_accrual_rate_per_sec": {"required": True},
+ }
+
+ _attribute_map = {
+ "initial_quota": {"key": "InitialQuota", "type": "float"},
+ "max_quota": {"key": "MaxQuota", "type": "float"},
+ "quota_accrual_rate_per_sec": {"key": "QuotaAccrualRatePerSec", "type": "float"},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_quota: float,
+ quota_accrual_rate_per_sec: float,
+ initial_quota: Optional[float] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword initial_quota: Initial quota.
+ :paramtype initial_quota: float
+ :keyword max_quota: Max quota. Required.
+ :paramtype max_quota: float
+ :keyword quota_accrual_rate_per_sec: Quota accrual rate per second. Required.
+ :paramtype quota_accrual_rate_per_sec: float
+ """
+ super().__init__(**kwargs)
+ self.initial_quota = initial_quota
+ self.max_quota = max_quota
+ self.quota_accrual_rate_per_sec = quota_accrual_rate_per_sec
+
+
+class RemoteDependency(DocumentIngress):
+ """RemoteDependency document type.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+ 'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+ "Event", "Trace", and "Unknown".
+ :vartype document_type: str or ~quickpulse_client.models.DocumentType
+ :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :vartype document_stream_ids: list[str]
+ :ivar properties: Collection of custom properties.
+ :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :ivar name: Name of the command initiated with this dependency call, e.g., GET /username.
+ :vartype name: str
+ :ivar command_name: URL of the dependency call to the target, with all query string parameters.
+ :vartype command_name: str
+    :ivar result_code: Result code of a dependency call, e.g., a SQL error code or an HTTP
+     status code.
+ :vartype result_code: str
+ :ivar duration: Request duration in ISO 8601 duration format, i.e., P[n]Y[n]M[n]DT[n]H[n]M[n]S
+ or P[n]W.
+ :vartype duration: str
+ """
+
+ _validation = {
+ "document_type": {"required": True},
+ "name": {"max_length": 1024},
+ "command_name": {"max_length": 2048},
+ "result_code": {"max_length": 1024},
+ }
+
+ _attribute_map = {
+ "document_type": {"key": "DocumentType", "type": "str"},
+ "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+ "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+ "name": {"key": "Name", "type": "str"},
+ "command_name": {"key": "CommandName", "type": "str"},
+ "result_code": {"key": "ResultCode", "type": "str"},
+ "duration": {"key": "Duration", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ document_stream_ids: Optional[List[str]] = None,
+ properties: Optional[List["_models.KeyValuePairString"]] = None,
+ name: Optional[str] = None,
+ command_name: Optional[str] = None,
+ result_code: Optional[str] = None,
+ duration: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :paramtype document_stream_ids: list[str]
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :keyword name: Name of the command initiated with this dependency call, e.g., GET /username.
+ :paramtype name: str
+ :keyword command_name: URL of the dependency call to the target, with all query string
+ parameters.
+ :paramtype command_name: str
+        :keyword result_code: Result code of a dependency call, e.g., a SQL error code or an
+         HTTP status code.
+ :paramtype result_code: str
+ :keyword duration: Request duration in ISO 8601 duration format, i.e.,
+ P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W.
+ :paramtype duration: str
+ """
+ super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+ self.document_type: str = "RemoteDependency"
+ self.name = name
+ self.command_name = command_name
+ self.result_code = result_code
+ self.duration = duration
+
+
+class Request(DocumentIngress):
+ """Request document type.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+ 'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+ "Event", "Trace", and "Unknown".
+ :vartype document_type: str or ~quickpulse_client.models.DocumentType
+ :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :vartype document_stream_ids: list[str]
+ :ivar properties: Collection of custom properties.
+ :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :ivar name: Name of the request, e.g., 'GET /values/{id}'.
+ :vartype name: str
+ :ivar url: Request URL with all query string parameters.
+ :vartype url: str
+    :ivar response_code: Result of a request execution. For HTTP requests, this is typically an
+     HTTP status code.
+ :vartype response_code: str
+ :ivar duration: Request duration in ISO 8601 duration format, i.e., P[n]Y[n]M[n]DT[n]H[n]M[n]S
+ or P[n]W.
+ :vartype duration: str
+ """
+
+ _validation = {
+ "document_type": {"required": True},
+ "name": {"max_length": 1024},
+ "url": {"max_length": 2048},
+ "response_code": {"max_length": 1024},
+ }
+
+ _attribute_map = {
+ "document_type": {"key": "DocumentType", "type": "str"},
+ "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+ "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+ "name": {"key": "Name", "type": "str"},
+ "url": {"key": "Url", "type": "str"},
+ "response_code": {"key": "ResponseCode", "type": "str"},
+ "duration": {"key": "Duration", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ document_stream_ids: Optional[List[str]] = None,
+ properties: Optional[List["_models.KeyValuePairString"]] = None,
+ name: Optional[str] = None,
+ url: Optional[str] = None,
+ response_code: Optional[str] = None,
+ duration: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :paramtype document_stream_ids: list[str]
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :keyword name: Name of the request, e.g., 'GET /values/{id}'.
+ :paramtype name: str
+ :keyword url: Request URL with all query string parameters.
+ :paramtype url: str
+        :keyword response_code: Result of a request execution. For HTTP requests, this is
+         typically an HTTP status code.
+ :paramtype response_code: str
+ :keyword duration: Request duration in ISO 8601 duration format, i.e.,
+ P[n]Y[n]M[n]DT[n]H[n]M[n]S or P[n]W.
+ :paramtype duration: str
+ """
+ super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+ self.document_type: str = "Request"
+ self.name = name
+ self.url = url
+ self.response_code = response_code
+ self.duration = duration
+
+
+class ServiceError(_serialization.Model):
+    """Optional HTTP response body, whose existence carries additional error descriptions.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar request_id: A globally unique identifier to identify the diagnostic context. It defaults
+ to the empty GUID.
+ :vartype request_id: str
+ :ivar response_date_time: Service error response date time. Required.
+ :vartype response_date_time: str
+ :ivar code: Error code. Required.
+ :vartype code: str
+ :ivar message: Error message. Required.
+ :vartype message: str
+ :ivar exception: Message of the exception that triggers the error response. Required.
+ :vartype exception: str
+ """
+
+ _validation = {
+ "request_id": {"required": True},
+ "response_date_time": {"required": True},
+ "code": {"required": True},
+ "message": {"required": True},
+ "exception": {"required": True},
+ }
+
+ _attribute_map = {
+ "request_id": {"key": "RequestId", "type": "str"},
+ "response_date_time": {"key": "ResponseDateTime", "type": "str"},
+ "code": {"key": "Code", "type": "str"},
+ "message": {"key": "Message", "type": "str"},
+ "exception": {"key": "Exception", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ request_id: str = "00000000-0000-0000-0000-000000000000",
+ response_date_time: str,
+ code: str,
+ message: str,
+ exception: str,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword request_id: A globally unique identifier to identify the diagnostic context. It
+ defaults to the empty GUID.
+ :paramtype request_id: str
+ :keyword response_date_time: Service error response date time. Required.
+ :paramtype response_date_time: str
+ :keyword code: Error code. Required.
+ :paramtype code: str
+ :keyword message: Error message. Required.
+ :paramtype message: str
+ :keyword exception: Message of the exception that triggers the error response. Required.
+ :paramtype exception: str
+ """
+ super().__init__(**kwargs)
+ self.request_id = request_id
+ self.response_date_time = response_date_time
+ self.code = code
+ self.message = message
+ self.exception = exception
+
+
+class Trace(DocumentIngress):
+ """Trace document type.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar document_type: Telemetry type. Types not defined in enum will get replaced with a
+ 'Unknown' type. Required. Known values are: "Request", "RemoteDependency", "Exception",
+ "Event", "Trace", and "Unknown".
+ :vartype document_type: str or ~quickpulse_client.models.DocumentType
+ :ivar document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :vartype document_stream_ids: list[str]
+ :ivar properties: Collection of custom properties.
+ :vartype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :ivar message: Trace message.
+ :vartype message: str
+ """
+
+ _validation = {
+ "document_type": {"required": True},
+ "message": {"max_length": 32768},
+ }
+
+ _attribute_map = {
+ "document_type": {"key": "DocumentType", "type": "str"},
+ "document_stream_ids": {"key": "DocumentStreamIds", "type": "[str]"},
+ "properties": {"key": "Properties", "type": "[KeyValuePairString]"},
+ "message": {"key": "Message", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ document_stream_ids: Optional[List[str]] = None,
+ properties: Optional[List["_models.KeyValuePairString"]] = None,
+ message: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword document_stream_ids: An array of document streaming ids. Each id identifies a flow of
+ documents customized by UX customers.
+ :paramtype document_stream_ids: list[str]
+ :keyword properties: Collection of custom properties.
+ :paramtype properties: list[~quickpulse_client.models.KeyValuePairString]
+ :keyword message: Trace message.
+ :paramtype message: str
+ """
+ super().__init__(document_stream_ids=document_stream_ids, properties=properties, **kwargs)
+ self.document_type: str = "Trace"
+ self.message = message
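+
+
+def _example_wire_serialization() -> dict:
+    # Editor's illustrative sketch, not generated code: subclasses stamp the
+    # DocumentType discriminator in __init__, and serialize() (the msrest-style
+    # helper assumed to be provided by _serialization.Model) emits the PascalCase
+    # wire keys declared in _attribute_map. The message is a placeholder.
+    trace = Trace(message="hello live metrics", document_stream_ids=["stream-1"])
+    return trace.serialize()  # e.g. {"DocumentType": "Trace", "Message": "hello live metrics", ...}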
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_patch.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/models/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/py.typed b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/py.typed
new file mode 100644
index 00000000..e5aff4f8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_generated/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561. \ No newline at end of file
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_live_metrics.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_live_metrics.py
new file mode 100644
index 00000000..7ce7874f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_live_metrics.py
@@ -0,0 +1,306 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# cSpell:disable
+from typing import Any, Dict, List, Optional
+
+import logging
+import platform
+import psutil
+
+from opentelemetry.sdk._logs import LogData
+from opentelemetry.sdk.metrics import MeterProvider
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
+from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.trace import SpanKind
+
+from azure.monitor.opentelemetry.exporter._generated.models import ContextTagKeys
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+ _COMMITTED_BYTES_NAME,
+ _DEPENDENCY_DURATION_NAME,
+ _DEPENDENCY_FAILURE_RATE_NAME,
+ _DEPENDENCY_RATE_NAME,
+ _EXCEPTION_RATE_NAME,
+ _PROCESS_PHYSICAL_BYTES_NAME,
+ _PROCESS_TIME_NORMALIZED_NAME,
+ _PROCESSOR_TIME_NAME,
+ _REQUEST_DURATION_NAME,
+ _REQUEST_FAILURE_RATE_NAME,
+ _REQUEST_RATE_NAME,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._cpu import (
+ _get_process_memory,
+ _get_process_time_normalized,
+ _get_process_time_normalized_old,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._exporter import (
+ _QuickpulseExporter,
+ _QuickpulseMetricReader,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._filter import (
+ _check_filters,
+ _check_metric_filters,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+ DerivedMetricInfo,
+ FilterConjunctionGroupInfo,
+ MonitoringDataPoint,
+ TelemetryType,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._projection import (
+ _create_projections,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+ _QuickpulseState,
+ _is_post_state,
+ _append_quickpulse_document,
+ _get_quickpulse_derived_metric_infos,
+ _get_quickpulse_doc_stream_infos,
+ _set_global_quickpulse_state,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+ _DependencyData,
+ _ExceptionData,
+ _RequestData,
+ _TelemetryData,
+ _TraceData,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._utils import (
+ _get_log_record_document,
+ _get_span_document,
+)
+from azure.monitor.opentelemetry.exporter.statsbeat._state import (
+ set_statsbeat_live_metrics_feature_set,
+)
+from azure.monitor.opentelemetry.exporter._utils import (
+ _get_sdk_version,
+ _is_on_app_service,
+ _populate_part_a_fields,
+ Singleton,
+)
+
+_logger = logging.getLogger(__name__)
+
+
+PROCESS = psutil.Process()
+NUM_CPUS = psutil.cpu_count()
+
+
+def enable_live_metrics(**kwargs: Any) -> None: # pylint: disable=C4758
+ """Live metrics entry point.
+
+ :keyword str connection_string: The connection string used for your Application Insights resource.
+ :keyword Resource resource: The OpenTelemetry Resource used for this Python application.
+ :keyword TokenCredential credential: Token credential, such as ManagedIdentityCredential or
+ ClientSecretCredential, used for Azure Active Directory (AAD) authentication. Defaults to None.
+ :rtype: None
+ """
+ _QuickpulseManager(**kwargs)
+    # We can detect feature usage for statsbeat since live metrics is currently opt-in.
+    # Once live metrics is on by default, we will have to check for both explicit usage
+    # and whether the user is actually using live metrics (i.e. viewing the live metrics blade in the UX)
+ set_statsbeat_live_metrics_feature_set()
+
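+
+def _example_enable_live_metrics() -> None:
+    # Editor's illustrative sketch, not part of the SDK: a typical opt-in call.
+    # The connection string below is a placeholder, not a real resource.
+    enable_live_metrics(
+        connection_string="InstrumentationKey=00000000-0000-0000-0000-000000000000"
+    )
+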
+
+# pylint: disable=protected-access,too-many-instance-attributes
+class _QuickpulseManager(metaclass=Singleton):
+
+ def __init__(self, **kwargs: Any) -> None:
+ _set_global_quickpulse_state(_QuickpulseState.PING_SHORT)
+ self._exporter = _QuickpulseExporter(**kwargs)
+ part_a_fields = {}
+ resource = kwargs.get("resource")
+ if not resource:
+ resource = Resource.create({})
+ part_a_fields = _populate_part_a_fields(resource)
+ id_generator = RandomIdGenerator()
+ self._base_monitoring_data_point = MonitoringDataPoint(
+ version=_get_sdk_version(),
+ # Invariant version 5 indicates filtering is supported
+ invariant_version=5,
+ instance=part_a_fields.get(ContextTagKeys.AI_CLOUD_ROLE_INSTANCE, ""),
+ role_name=part_a_fields.get(ContextTagKeys.AI_CLOUD_ROLE, ""),
+ machine_name=platform.node(),
+ stream_id=str(id_generator.generate_trace_id()),
+ is_web_app=_is_on_app_service(),
+ performance_collection_supported=True,
+ )
+ self._reader = _QuickpulseMetricReader(self._exporter, self._base_monitoring_data_point)
+ self._meter_provider = MeterProvider(
+ metric_readers=[self._reader],
+ resource=resource,
+ )
+ self._meter = self._meter_provider.get_meter("azure_monitor_live_metrics")
+
+ self._request_duration = self._meter.create_histogram(
+ _REQUEST_DURATION_NAME[0], "ms", "live metrics avg request duration in ms"
+ )
+ self._dependency_duration = self._meter.create_histogram(
+ _DEPENDENCY_DURATION_NAME[0], "ms", "live metrics avg dependency duration in ms"
+ )
+        # We use a counter to represent rates per second: because the collection
+        # interval is one second, the number of requests within the interval is
+        # itself the per-second rate
+ self._request_rate_counter = self._meter.create_counter(
+ _REQUEST_RATE_NAME[0], "req/sec", "live metrics request rate per second"
+ )
+ self._request_failed_rate_counter = self._meter.create_counter(
+ _REQUEST_FAILURE_RATE_NAME[0], "req/sec", "live metrics request failed rate per second"
+ )
+ self._dependency_rate_counter = self._meter.create_counter(
+ _DEPENDENCY_RATE_NAME[0], "dep/sec", "live metrics dependency rate per second"
+ )
+ self._dependency_failure_rate_counter = self._meter.create_counter(
+ _DEPENDENCY_FAILURE_RATE_NAME[0], "dep/sec", "live metrics dependency failure rate per second"
+ )
+ self._exception_rate_counter = self._meter.create_counter(
+ _EXCEPTION_RATE_NAME[0], "exc/sec", "live metrics exception rate per second"
+ )
+ self._process_memory_gauge_old = self._meter.create_observable_gauge(
+ _COMMITTED_BYTES_NAME[0],
+ [_get_process_memory],
+ )
+ self._process_memory_gauge = self._meter.create_observable_gauge(
+ _PROCESS_PHYSICAL_BYTES_NAME[0],
+ [_get_process_memory],
+ )
+ self._process_time_gauge_old = self._meter.create_observable_gauge(
+ _PROCESSOR_TIME_NAME[0],
+ [_get_process_time_normalized_old],
+ )
+ self._process_time_gauge = self._meter.create_observable_gauge(
+ _PROCESS_TIME_NORMALIZED_NAME[0],
+ [_get_process_time_normalized],
+ )
+
+ def _record_span(self, span: ReadableSpan) -> None:
+ # Only record if in post state
+ if _is_post_state():
+ try:
+ duration_ms = 0
+                if span.end_time and span.start_time:
+                    # Span timestamps are in nanoseconds; dividing by 1e9 yields seconds,
+                    # which downstream aggregation is assumed to scale to the ms unit
+                    # declared on the duration histograms
+                    duration_ms = (span.end_time - span.start_time) / 1e9  # type: ignore
+ # TODO: Spec out what "success" is
+ success = span.status.is_ok
+
+ if span.kind in (SpanKind.SERVER, SpanKind.CONSUMER):
+ if success:
+ self._request_rate_counter.add(1)
+ else:
+ self._request_failed_rate_counter.add(1)
+ self._request_duration.record(duration_ms)
+ else:
+ if success:
+ self._dependency_rate_counter.add(1)
+ else:
+ self._dependency_failure_rate_counter.add(1)
+ self._dependency_duration.record(duration_ms)
+
+ # Derive metrics for quickpulse filtering
+ data = _TelemetryData._from_span(span)
+ _derive_metrics_from_telemetry_data(data)
+
+ # Process docs for quickpulse filtering
+ _apply_document_filters_from_telemetry_data(data)
+
+ # Derive exception metrics from span events
+ if span.events:
+ for event in span.events:
+ if event.name == "exception":
+ self._exception_rate_counter.add(1)
+ # Derive metrics for quickpulse filtering for exception
+ exc_data = _ExceptionData._from_span_event(event)
+ _derive_metrics_from_telemetry_data(exc_data)
+ # Process docs for quickpulse filtering for exception
+ _apply_document_filters_from_telemetry_data(exc_data)
+ except Exception: # pylint: disable=broad-except
+ _logger.exception("Exception occurred while recording span.")
+
+ def _record_log_record(self, log_data: LogData) -> None:
+ # Only record if in post state
+ if _is_post_state():
+ try:
+ if log_data.log_record:
+ exc_type = None
+ log_record = log_data.log_record
+ if log_record.attributes:
+ exc_type = log_record.attributes.get(SpanAttributes.EXCEPTION_TYPE)
+ exc_message = log_record.attributes.get(SpanAttributes.EXCEPTION_MESSAGE)
+ if exc_type is not None or exc_message is not None:
+ self._exception_rate_counter.add(1)
+
+ # Derive metrics for quickpulse filtering
+ data = _TelemetryData._from_log_record(log_record)
+ _derive_metrics_from_telemetry_data(data)
+
+ # Process docs for quickpulse filtering
+ _apply_document_filters_from_telemetry_data(data, exc_type) # type: ignore
+ except Exception: # pylint: disable=broad-except
+ _logger.exception("Exception occurred while recording log record.")
+
+
+# Filtering
+
+# Called by record_span/record_log when processing a span/log_record for metrics filtering
+# Derives metrics from projections if applicable to current filters in config
+def _derive_metrics_from_telemetry_data(data: _TelemetryData):
+ metric_infos_dict: Dict[TelemetryType, List[DerivedMetricInfo]] = _get_quickpulse_derived_metric_infos()
+ # if empty, filtering was not configured
+ if not metric_infos_dict:
+ return
+ metric_infos = [] # type: ignore
+ if isinstance(data, _RequestData):
+ metric_infos = metric_infos_dict.get(TelemetryType.REQUEST) # type: ignore
+ elif isinstance(data, _DependencyData):
+ metric_infos = metric_infos_dict.get(TelemetryType.DEPENDENCY) # type: ignore
+ elif isinstance(data, _ExceptionData):
+ metric_infos = metric_infos_dict.get(TelemetryType.EXCEPTION) # type: ignore
+ elif isinstance(data, _TraceData):
+ metric_infos = metric_infos_dict.get(TelemetryType.TRACE) # type: ignore
+ if metric_infos and _check_metric_filters(metric_infos, data):
+ # Since this data matches the filter, create projections used to
+ # generate filtered metrics
+ _create_projections(metric_infos, data)
+
+
+# Called by record_span/record_log when processing a span/log_record for docs filtering
+# Finds doc stream Ids and their doc filter configurations
+def _apply_document_filters_from_telemetry_data(data: _TelemetryData, exc_type: Optional[str] = None):
+ doc_config_dict: Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]] = _get_quickpulse_doc_stream_infos() # pylint: disable=C0301
+ stream_ids = set()
+ doc_config = {} # type: ignore
+ if isinstance(data, _RequestData):
+ doc_config = doc_config_dict.get(TelemetryType.REQUEST, {}) # type: ignore
+ elif isinstance(data, _DependencyData):
+ doc_config = doc_config_dict.get(TelemetryType.DEPENDENCY, {}) # type: ignore
+ elif isinstance(data, _ExceptionData):
+ doc_config = doc_config_dict.get(TelemetryType.EXCEPTION, {}) # type: ignore
+ elif isinstance(data, _TraceData):
+ doc_config = doc_config_dict.get(TelemetryType.TRACE, {}) # type: ignore
+ for stream_id, filter_groups in doc_config.items():
+ for filter_group in filter_groups:
+ if _check_filters(filter_group.filters, data):
+ stream_ids.add(stream_id)
+ break
+
+ # We only append and send the document if either:
+ # 1. The document matched the filtering for a specific streamId
+ # 2. Filtering was not enabled for this telemetry type (empty doc_config)
+ if len(stream_ids) > 0 or not doc_config:
+ if type(data) in (_DependencyData, _RequestData):
+ document = _get_span_document(data) # type: ignore
+ else:
+ document = _get_log_record_document(data, exc_type) # type: ignore
+            # A stream (with a unique streamId) is relevant when multiple sources send to the same
+            # Application Insights instance with live metrics enabled
+            # Modify the document's streamIds to determine which stream(s) to send to in post
+            # Note that in the default case the list of document_stream_ids is empty, in which
+            # case no filtering is done for the telemetry type and the document is sent to all streams
+ if stream_ids:
+ document.document_stream_ids = list(stream_ids)
+
+ # Add the generated document to be sent to quickpulse
+ _append_quickpulse_document(document)
+
+# cSpell:enable
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_policy.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_policy.py
new file mode 100644
index 00000000..b0d84aea
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_policy.py
@@ -0,0 +1,36 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import Any, Optional
+from urllib.parse import urlparse
+from weakref import ReferenceType
+
+from azure.core.pipeline import PipelineResponse, policies
+
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import _QUICKPULSE_REDIRECT_HEADER_NAME
+from azure.monitor.opentelemetry.exporter._quickpulse._generated import QuickpulseClient
+
+
+# Quickpulse endpoint handles redirects via header instead of status codes
+# We use a custom RedirectPolicy to handle this use case
+# pylint: disable=protected-access
+class _QuickpulseRedirectPolicy(policies.RedirectPolicy):
+
+ def __init__(self, **kwargs: Any) -> None:
+ # Weakref to QuickPulseClient instance
+ self._qp_client_ref: Optional[ReferenceType[QuickpulseClient]] = None
+ super().__init__(**kwargs)
+
+    # Gets the redirect location from the response header
+ def get_redirect_location(self, response: PipelineResponse) -> Optional[str]:
+ redirect_location = response.http_response.headers.get(_QUICKPULSE_REDIRECT_HEADER_NAME)
+ qp_client = None
+ if redirect_location:
+ redirected_url = urlparse(redirect_location)
+ if redirected_url.scheme and redirected_url.netloc:
+ if self._qp_client_ref:
+ qp_client = self._qp_client_ref()
+ if qp_client and qp_client._client:
+ # Set new endpoint to redirect location
+ qp_client._client._base_url = f"{redirected_url.scheme}://{redirected_url.netloc}"
+ return redirect_location # type: ignore
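+
+
+def _example_redirect_location_check() -> str:
+    # Editor's illustrative sketch, not part of the SDK: the policy above only
+    # honors a redirect header whose value parses to an absolute URL, and then
+    # rewrites the client's base URL to scheme://netloc. The URL is a placeholder.
+    location = "https://eastus.livediagnostics.monitor.azure.com"
+    parsed = urlparse(location)
+    assert parsed.scheme and parsed.netloc
+    return f"{parsed.scheme}://{parsed.netloc}"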
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_processor.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_processor.py
new file mode 100644
index 00000000..0bcc69ec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_processor.py
@@ -0,0 +1,33 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from opentelemetry.sdk._logs import LogData, LogRecordProcessor
+from opentelemetry.sdk.trace import ReadableSpan, SpanProcessor
+
+from azure.monitor.opentelemetry.exporter._quickpulse._live_metrics import _QuickpulseManager
+
+
+# pylint: disable=protected-access
+class _QuickpulseLogRecordProcessor(LogRecordProcessor):
+
+ def emit(self, log_data: LogData) -> None: # type: ignore
+ qpm = _QuickpulseManager._instance
+ if qpm:
+ qpm._record_log_record(log_data)
+ super().emit(log_data) # type: ignore[safe-super]
+
+ def shutdown(self):
+ pass
+
+ def force_flush(self, timeout_millis: int = 30000):
+ super().force_flush(timeout_millis=timeout_millis) # type: ignore[safe-super]
+
+
+# pylint: disable=protected-access
+class _QuickpulseSpanProcessor(SpanProcessor):
+
+ def on_end(self, span: ReadableSpan) -> None:
+ qpm = _QuickpulseManager._instance
+ if qpm:
+ qpm._record_span(span)
+ return super().on_end(span)
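+
+
+def _example_register_processors() -> None:
+    # Editor's illustrative sketch, not part of the SDK: the processors above only
+    # feed live metrics once attached to the OpenTelemetry providers. This uses
+    # plain OpenTelemetry SDK APIs; a LoggerProvider would similarly take the log
+    # processor via add_log_record_processor.
+    from opentelemetry.sdk.trace import TracerProvider
+
+    tracer_provider = TracerProvider()
+    tracer_provider.add_span_processor(_QuickpulseSpanProcessor())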
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_projection.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_projection.py
new file mode 100644
index 00000000..998e422a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_projection.py
@@ -0,0 +1,98 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from typing import List, Optional, Tuple
+
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+ _QUICKPULSE_PROJECTION_COUNT,
+ _QUICKPULSE_PROJECTION_CUSTOM,
+ _QUICKPULSE_PROJECTION_DURATION,
+ _QUICKPULSE_PROJECTION_MAX_VALUE,
+ _QUICKPULSE_PROJECTION_MIN_VALUE,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+ AggregationType,
+ DerivedMetricInfo,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+ _get_quickpulse_projection_map,
+ _set_quickpulse_projection_map,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+ _DependencyData,
+ _RequestData,
+ _TelemetryData,
+)
+
+
+# Initialize metric projections per DerivedMetricInfo
+def _init_derived_metric_projection(filter_info: DerivedMetricInfo):
+ derived_metric_agg_value = 0
+ if filter_info.aggregation == AggregationType.MIN:
+ derived_metric_agg_value = _QUICKPULSE_PROJECTION_MAX_VALUE
+ elif filter_info.aggregation == AggregationType.MAX:
+ derived_metric_agg_value = _QUICKPULSE_PROJECTION_MIN_VALUE
+ elif filter_info.aggregation == AggregationType.SUM:
+ derived_metric_agg_value = 0
+ elif filter_info.aggregation == AggregationType.AVG:
+ derived_metric_agg_value = 0
+ _set_quickpulse_projection_map(
+ filter_info.id,
+ AggregationType(filter_info.aggregation),
+ derived_metric_agg_value,
+ 0,
+ )
+
+
+# Create projections based on the DerivedMetricInfos and the telemetry data currently being processed
+def _create_projections(metric_infos: List[DerivedMetricInfo], data: _TelemetryData):
+ for metric_info in metric_infos:
+ value = 0
+ if metric_info.projection == _QUICKPULSE_PROJECTION_COUNT:
+ value = 1
+ elif metric_info.projection == _QUICKPULSE_PROJECTION_DURATION:
+ if isinstance(data, (_DependencyData, _RequestData)):
+ value = data.duration # type: ignore
+ else:
+ # Duration only supported for Dependency and Requests
+ continue
+ elif metric_info.projection.startswith(_QUICKPULSE_PROJECTION_CUSTOM):
+ key = metric_info.projection.split(_QUICKPULSE_PROJECTION_CUSTOM, 1)[1].strip()
+ dim_value = data.custom_dimensions.get(key, 0)
+ if dim_value is None:
+ continue
+ try:
+ value = float(dim_value) # type: ignore
+ except ValueError:
+ continue
+ else:
+ continue
+
+ aggregate: Optional[Tuple[float, int]] = _calculate_aggregation(
+ AggregationType(metric_info.aggregation),
+ metric_info.id,
+ value,
+ )
+ if aggregate:
+ _set_quickpulse_projection_map(
+ metric_info.id,
+ AggregationType(metric_info.aggregation),
+ aggregate[0],
+ aggregate[1],
+ )
+
+
+# Calculate the aggregation from the previous projection value and the aggregation type of a specific metric filter
+# Return type is a Tuple of (value, count)
+def _calculate_aggregation(aggregation: AggregationType, id: str, value: float) -> Optional[Tuple[float, int]]:
+ projection: Optional[Tuple[AggregationType, float, int]] = _get_quickpulse_projection_map().get(id)
+ if projection:
+ prev_value = projection[1]
+ prev_count = projection[2]
+ if aggregation == AggregationType.SUM:
+ return (prev_value + value, prev_count + 1)
+ if aggregation == AggregationType.MIN:
+ return (min(prev_value, value), prev_count + 1)
+ if aggregation == AggregationType.MAX:
+ return (max(prev_value, value), prev_count + 1)
+ return (prev_value + value, prev_count + 1)
+ return None
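
To make the MIN/MAX seeding above concrete: a metric seeded with the MAX sentinel means the first observed value always wins the min comparison. A small REPL-style walkthrough (the metric id "metric-1" is made up):

    from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
        _QUICKPULSE_PROJECTION_MAX_VALUE,
    )
    from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import AggregationType
    from azure.monitor.opentelemetry.exporter._quickpulse._projection import _calculate_aggregation
    from azure.monitor.opentelemetry.exporter._quickpulse._state import _set_quickpulse_projection_map

    # Seed a MIN projection the same way _init_derived_metric_projection would
    _set_quickpulse_projection_map("metric-1", AggregationType.MIN, _QUICKPULSE_PROJECTION_MAX_VALUE, 0)
    print(_calculate_aggregation(AggregationType.MIN, "metric-1", 250.0))  # (250.0, 1)

    # Store the result; a smaller observation then replaces the running minimum
    _set_quickpulse_projection_map("metric-1", AggregationType.MIN, 250.0, 1)
    print(_calculate_aggregation(AggregationType.MIN, "metric-1", 100.0))  # (100.0, 2)
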
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_state.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_state.py
new file mode 100644
index 00000000..f19bccda
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_state.py
@@ -0,0 +1,190 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from datetime import datetime
+from enum import Enum
+from typing import Dict, List, Tuple
+
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+ _LONG_PING_INTERVAL_SECONDS,
+ _POST_INTERVAL_SECONDS,
+ _QUICKPULSE_PROJECTION_MAX_VALUE,
+ _QUICKPULSE_PROJECTION_MIN_VALUE,
+ _SHORT_PING_INTERVAL_SECONDS,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+ AggregationType,
+ DerivedMetricInfo,
+ DocumentIngress,
+ FilterConjunctionGroupInfo,
+ TelemetryType,
+)
+
+
+class _QuickpulseState(Enum):
+ """Current state of quickpulse service.
+ The numerical value represents the ping/post interval in ms for those states.
+ """
+
+ OFFLINE = 0
+ PING_SHORT = _SHORT_PING_INTERVAL_SECONDS
+ PING_LONG = _LONG_PING_INTERVAL_SECONDS
+ POST_SHORT = _POST_INTERVAL_SECONDS
+
+
+_GLOBAL_QUICKPULSE_STATE = _QuickpulseState.OFFLINE
+_QUICKPULSE_DOCUMENTS: List[DocumentIngress] = []
+_QUICKPULSE_LAST_PROCESS_TIME = 0.0
+_QUICKPULSE_PROCESS_ELAPSED_TIME = datetime.now()
+_QUICKPULSE_LAST_PROCESS_CPU = 0.0
+# Filtering
+_QUICKPULSE_ETAG = ""
+_QUICKPULSE_DERIVED_METRIC_INFOS: Dict[TelemetryType, List[DerivedMetricInfo]] = {}
+_QUICKPULSE_PROJECTION_MAP: Dict[str, Tuple[AggregationType, float, int]] = {}
+_QUICKPULSE_DOC_STREAM_INFOS: Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]] = {}
+
+
+def _set_global_quickpulse_state(state: _QuickpulseState) -> None:
+ # pylint: disable=global-statement
+ global _GLOBAL_QUICKPULSE_STATE
+ _GLOBAL_QUICKPULSE_STATE = state
+
+
+def _get_global_quickpulse_state() -> _QuickpulseState:
+ return _GLOBAL_QUICKPULSE_STATE
+
+
+def _set_quickpulse_last_process_time(time: float) -> None:
+ # pylint: disable=global-statement
+ global _QUICKPULSE_LAST_PROCESS_TIME
+ _QUICKPULSE_LAST_PROCESS_TIME = time
+
+
+def _get_quickpulse_last_process_time() -> float:
+ return _QUICKPULSE_LAST_PROCESS_TIME
+
+
+def _set_quickpulse_process_elapsed_time(time: datetime) -> None:
+ # pylint: disable=global-statement
+ global _QUICKPULSE_PROCESS_ELAPSED_TIME
+ _QUICKPULSE_PROCESS_ELAPSED_TIME = time
+
+
+def _get_quickpulse_process_elapsed_time() -> datetime:
+ return _QUICKPULSE_PROCESS_ELAPSED_TIME
+
+
+def _set_quickpulse_last_process_cpu(time: float) -> None:
+ # pylint: disable=global-statement
+ global _QUICKPULSE_LAST_PROCESS_CPU
+ _QUICKPULSE_LAST_PROCESS_CPU = time
+
+
+def _get_quickpulse_last_process_cpu() -> float:
+ return _QUICKPULSE_LAST_PROCESS_CPU
+
+
+def is_quickpulse_enabled() -> bool:
+ return _get_global_quickpulse_state() is not _QuickpulseState.OFFLINE
+
+
+def _is_ping_state() -> bool:
+ return _get_global_quickpulse_state() in (_QuickpulseState.PING_SHORT, _QuickpulseState.PING_LONG)
+
+
+def _is_post_state():
+ return _get_global_quickpulse_state() is _QuickpulseState.POST_SHORT
+
+
+def _append_quickpulse_document(document: DocumentIngress):
+ # pylint: disable=global-variable-not-assigned
+ global _QUICKPULSE_DOCUMENTS
+    # Limit the risk of memory growth by capping the number of buffered documents
+ if len(_QUICKPULSE_DOCUMENTS) > 20:
+ try:
+ _QUICKPULSE_DOCUMENTS.pop(0)
+ except IndexError:
+ pass
+ _QUICKPULSE_DOCUMENTS.append(document)
+
+
+def _get_and_clear_quickpulse_documents() -> List[DocumentIngress]:
+ # pylint: disable=global-statement
+ global _QUICKPULSE_DOCUMENTS
+ documents = list(_QUICKPULSE_DOCUMENTS)
+ _QUICKPULSE_DOCUMENTS = []
+ return documents
+
+
+# Filtering
+
+
+# Used for etag configuration
+def _set_quickpulse_etag(etag: str) -> None:
+ # pylint: disable=global-statement
+ global _QUICKPULSE_ETAG
+ _QUICKPULSE_ETAG = etag
+
+
+def _get_quickpulse_etag() -> str:
+ return _QUICKPULSE_ETAG
+
+
+# Used for updating the metric filter configuration when the etag has changed
+# Contains the filters and projections to apply for each telemetry type, if present
+def _set_quickpulse_derived_metric_infos(filters: Dict[TelemetryType, List[DerivedMetricInfo]]) -> None:
+ # pylint: disable=global-statement
+ global _QUICKPULSE_DERIVED_METRIC_INFOS
+ _QUICKPULSE_DERIVED_METRIC_INFOS = filters
+
+
+def _get_quickpulse_derived_metric_infos() -> Dict[TelemetryType, List[DerivedMetricInfo]]:
+ return _QUICKPULSE_DERIVED_METRIC_INFOS
+
+
+# Used for initializing and setting projections when span/logs are recorded
+def _set_quickpulse_projection_map(metric_id: str, aggregation_type: AggregationType, value: float, count: int):
+ # pylint: disable=global-variable-not-assigned
+ global _QUICKPULSE_PROJECTION_MAP
+ _QUICKPULSE_PROJECTION_MAP[metric_id] = (aggregation_type, value, count)
+
+
+def _get_quickpulse_projection_map() -> Dict[str, Tuple[AggregationType, float, int]]:
+ return _QUICKPULSE_PROJECTION_MAP
+
+
+# Resets projections per derived metric info for the next quickpulse interval
+# Called once processing/exporting of the previous quickpulse projections has finished
+def _reset_quickpulse_projection_map():
+ # pylint: disable=global-statement
+ global _QUICKPULSE_PROJECTION_MAP
+ new_map = {}
+ if _QUICKPULSE_PROJECTION_MAP:
+ for id, projection in _QUICKPULSE_PROJECTION_MAP.items():
+ value = 0
+ if projection[0] == AggregationType.MIN:
+ value = _QUICKPULSE_PROJECTION_MAX_VALUE
+ elif projection[0] == AggregationType.MAX:
+ value = _QUICKPULSE_PROJECTION_MIN_VALUE
+ new_map[id] = (projection[0], value, 0)
+ _QUICKPULSE_PROJECTION_MAP.clear()
+ _QUICKPULSE_PROJECTION_MAP = new_map
+
+
+# Clears the projection map; usually called when the configuration changes
+def _clear_quickpulse_projection_map():
+ # pylint: disable=global-variable-not-assigned
+ global _QUICKPULSE_PROJECTION_MAP
+ _QUICKPULSE_PROJECTION_MAP.clear()
+
+
+# Used for updating the doc filter configuration when the etag has changed
+# Contains the document filters to apply for each telemetry type, if present
+# Format is Dict[TelemetryType, Dict[stream.id, List[FilterConjunctionGroupInfo]]]
+def _set_quickpulse_doc_stream_infos(filters: Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]]) -> None:
+ # pylint: disable=global-statement
+ global _QUICKPULSE_DOC_STREAM_INFOS
+ _QUICKPULSE_DOC_STREAM_INFOS = filters
+
+
+def _get_quickpulse_doc_stream_infos() -> Dict[TelemetryType, Dict[str, List[FilterConjunctionGroupInfo]]]:
+ return _QUICKPULSE_DOC_STREAM_INFOS
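
One behaviour worth calling out from the state module: _append_quickpulse_document caps the buffer at 21 entries by evicting the oldest document before appending, and _get_and_clear_quickpulse_documents swaps the list out wholesale. A hedged illustration (the Trace documents are placeholders):

    from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
        DocumentType,
        Trace,
    )
    from azure.monitor.opentelemetry.exporter._quickpulse._state import (
        _append_quickpulse_document,
        _get_and_clear_quickpulse_documents,
    )

    for i in range(30):
        _append_quickpulse_document(
            Trace(document_type=DocumentType.TRACE, message=f"doc {i}")
        )

    print(len(_get_and_clear_quickpulse_documents()))  # 21, oldest entries evicted
    print(len(_get_and_clear_quickpulse_documents()))  # 0, the buffer was cleared
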
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_types.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_types.py
new file mode 100644
index 00000000..810c3f91
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_types.py
@@ -0,0 +1,227 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# pylint: disable=protected-access
+from dataclasses import dataclass, fields
+from typing import Dict, no_type_check
+
+from opentelemetry.sdk._logs import LogRecord
+from opentelemetry.sdk.trace import Event, ReadableSpan
+from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
+from opentelemetry.semconv.attributes.http_attributes import (
+ HTTP_REQUEST_METHOD,
+ HTTP_RESPONSE_STATUS_CODE,
+)
+from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.trace import SpanKind
+
+from azure.monitor.opentelemetry.exporter.export.trace import _utils as trace_utils
+
+
+@dataclass
+class _TelemetryData:
+ custom_dimensions: Dict[str, str]
+
+ @staticmethod
+ def _from_span(span: ReadableSpan):
+ if span.kind in (SpanKind.SERVER, SpanKind.CONSUMER):
+ return _RequestData._from_span(span)
+ return _DependencyData._from_span(span)
+
+ @staticmethod
+ @no_type_check
+ def _from_log_record(log_record: LogRecord):
+ exc_type = log_record.attributes.get(SpanAttributes.EXCEPTION_TYPE)
+ exc_message = log_record.attributes.get(SpanAttributes.EXCEPTION_MESSAGE)
+ if exc_type is not None or exc_message is not None:
+ return _ExceptionData._from_log_record(log_record)
+ return _TraceData._from_log_record(log_record)
+
+
+@dataclass
+class _RequestData(_TelemetryData):
+ duration: float
+ success: bool
+ name: str
+ response_code: int
+ url: str
+
+ @staticmethod
+ @no_type_check
+ def _from_span(span: ReadableSpan):
+ # Logic should match that of exporter to Breeze
+ url = ""
+ duration_ms = 0
+ response_code = 0
+ success = True
+ attributes = {}
+ if span.end_time and span.start_time:
+ duration_ms = (span.end_time - span.start_time) / 1e9
+ if span.attributes:
+ attributes = span.attributes
+ url = trace_utils._get_url_for_http_request(attributes)
+ status_code = attributes.get(HTTP_RESPONSE_STATUS_CODE) or \
+ attributes.get(SpanAttributes.HTTP_STATUS_CODE)
+ if status_code:
+ try:
+ status_code = int(status_code)
+ except ValueError:
+ status_code = 0
+ else:
+ status_code = 0
+ success = span.status.is_ok and status_code and status_code not in range(400, 500)
+ response_code = status_code
+ return _RequestData(
+ duration=duration_ms,
+ success=success,
+ name=span.name,
+ response_code=response_code,
+ url=url or "",
+ custom_dimensions=attributes,
+ )
+
+
+@dataclass
+class _DependencyData(_TelemetryData):
+ duration: float
+ success: bool
+ name: str
+ result_code: int
+ target: str
+ type: str
+ data: str
+
+ @staticmethod
+ @no_type_check
+ def _from_span(span: ReadableSpan):
+ # Logic should match that of exporter to Breeze
+ url = ""
+ duration_ms = 0
+ result_code = 0
+ attributes = {}
+ dependency_type = "InProc"
+ data = ""
+ target = ""
+ if span.end_time and span.start_time:
+ duration_ms = (span.end_time - span.start_time) / 1e9
+ if span.attributes:
+ attributes = span.attributes
+ target = trace_utils._get_target_for_dependency_from_peer(attributes)
+ if span.kind is SpanKind.CLIENT:
+ if HTTP_REQUEST_METHOD in attributes or SpanAttributes.HTTP_METHOD in attributes:
+ dependency_type = "HTTP"
+ url = trace_utils._get_url_for_http_dependency(attributes)
+ target, _ = trace_utils._get_target_and_path_for_http_dependency(
+ attributes,
+ url,
+ )
+ data = url
+ elif SpanAttributes.DB_SYSTEM in attributes:
+ db_system = attributes[SpanAttributes.DB_SYSTEM]
+ dependency_type = db_system
+ target = trace_utils._get_target_for_db_dependency(
+ target,
+ db_system,
+ attributes,
+ )
+ if SpanAttributes.DB_STATEMENT in attributes:
+ data = attributes[SpanAttributes.DB_STATEMENT]
+ elif SpanAttributes.DB_OPERATION in attributes:
+ data = attributes[SpanAttributes.DB_OPERATION]
+ elif SpanAttributes.MESSAGING_SYSTEM in attributes:
+ dependency_type = attributes[SpanAttributes.MESSAGING_SYSTEM]
+ target = trace_utils._get_target_for_messaging_dependency(
+ target,
+ attributes,
+ )
+ elif SpanAttributes.RPC_SYSTEM in attributes:
+ dependency_type = attributes[SpanAttributes.RPC_SYSTEM]
+ target = trace_utils._get_target_for_rpc_dependency(
+ target,
+ attributes,
+ )
+ elif gen_ai_attributes.GEN_AI_SYSTEM in span.attributes:
+ dependency_type = attributes[gen_ai_attributes.GEN_AI_SYSTEM]
+ elif span.kind is SpanKind.PRODUCER:
+ dependency_type = "Queue Message"
+ msg_system = attributes.get(SpanAttributes.MESSAGING_SYSTEM)
+ if msg_system:
+ dependency_type += " | {}".format(msg_system)
+ else:
+ dependency_type = "InProc"
+
+ return _DependencyData(
+ duration=duration_ms,
+ success=span.status.is_ok,
+ name=span.name,
+ result_code=result_code,
+ target=target,
+ type=str(dependency_type),
+ data=data,
+ custom_dimensions=attributes,
+ )
+
+
+@dataclass
+class _ExceptionData(_TelemetryData):
+ message: str
+ stack_trace: str
+
+ @staticmethod
+ @no_type_check
+ def _from_log_record(log_record: LogRecord):
+ return _ExceptionData(
+ message=str(log_record.attributes.get(SpanAttributes.EXCEPTION_MESSAGE, "")),
+ stack_trace=str(log_record.attributes.get(SpanAttributes.EXCEPTION_STACKTRACE, "")),
+ custom_dimensions=log_record.attributes,
+ )
+
+ @staticmethod
+ @no_type_check
+ def _from_span_event(span_event: Event):
+ return _ExceptionData(
+ message=str(span_event.attributes.get(SpanAttributes.EXCEPTION_MESSAGE, "")),
+ stack_trace=str(span_event.attributes.get(SpanAttributes.EXCEPTION_STACKTRACE, "")),
+ custom_dimensions=span_event.attributes,
+ )
+
+
+@dataclass
+class _TraceData(_TelemetryData):
+ message: str
+
+ @staticmethod
+ @no_type_check
+ def _from_log_record(log_record: LogRecord):
+ return _TraceData(
+ message=str(log_record.body),
+ custom_dimensions=log_record.attributes,
+ )
+
+
+def _get_field_names(data_type: type):
+ field_map = {}
+ for field in fields(data_type):
+ field_map[field.name.replace("_", "").lower()] = field.name
+ return field_map
+
+
+_DEPENDENCY_DATA_FIELD_NAMES = _get_field_names(_DependencyData)
+_EXCEPTION_DATA_FIELD_NAMES = _get_field_names(_ExceptionData)
+_REQUEST_DATA_FIELD_NAMES = _get_field_names(_RequestData)
+_TRACE_DATA_FIELD_NAMES = _get_field_names(_TraceData)
+_DATA_FIELD_NAMES = {
+ _DependencyData: _DEPENDENCY_DATA_FIELD_NAMES,
+ _ExceptionData: _EXCEPTION_DATA_FIELD_NAMES,
+ _RequestData: _REQUEST_DATA_FIELD_NAMES,
+ _TraceData: _TRACE_DATA_FIELD_NAMES,
+}
+_KNOWN_STRING_FIELD_NAMES = (
+ "Url",
+ "Name",
+ "Target",
+ "Type",
+ "Data",
+ "Message",
+ "Exception.Message",
+ "Exception.StackTrace",
+)
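
The _get_field_names map above normalises dataclass field names so that service-side filter names such as "ResponseCode" can be matched case-insensitively with underscores stripped. A quick check of the resulting lookup:

    from azure.monitor.opentelemetry.exporter._quickpulse._types import _REQUEST_DATA_FIELD_NAMES

    print(_REQUEST_DATA_FIELD_NAMES["responsecode"])            # "response_code"
    print("ResponseCode".lower() in _REQUEST_DATA_FIELD_NAMES)  # True
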
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_utils.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_utils.py
new file mode 100644
index 00000000..fa6bcb9d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_utils.py
@@ -0,0 +1,179 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from datetime import datetime, timezone
+from typing import List, Optional, Tuple, Union
+
+from opentelemetry.sdk.metrics._internal.point import (
+ NumberDataPoint,
+ HistogramDataPoint,
+)
+from opentelemetry.sdk.metrics.export import MetricsData as OTMetricsData
+
+from azure.monitor.opentelemetry.exporter._quickpulse._constants import (
+ _QUICKPULSE_METRIC_NAME_MAPPINGS,
+ _QUICKPULSE_PROJECTION_MAX_VALUE,
+ _QUICKPULSE_PROJECTION_MIN_VALUE,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+ AggregationType,
+ DocumentIngress,
+ DocumentType,
+ Exception as ExceptionDocument,
+ MetricPoint,
+ MonitoringDataPoint,
+ RemoteDependency as RemoteDependencyDocument,
+ Request as RequestDocument,
+ Trace as TraceDocument,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._state import (
+ _get_quickpulse_projection_map,
+ _reset_quickpulse_projection_map,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+ _DependencyData,
+ _ExceptionData,
+ _RequestData,
+ _TraceData,
+)
+
+
+def _metric_to_quick_pulse_data_points( # pylint: disable=too-many-nested-blocks
+ metrics_data: OTMetricsData,
+ base_monitoring_data_point: MonitoringDataPoint,
+ documents: Optional[List[DocumentIngress]],
+) -> List[MonitoringDataPoint]:
+ metric_points = []
+ for resource_metric in metrics_data.resource_metrics:
+ for scope_metric in resource_metric.scope_metrics:
+ for metric in scope_metric.metrics:
+ for point in metric.data.data_points:
+ if point is not None:
+ value = 0
+ if isinstance(point, HistogramDataPoint):
+ if point.count > 0:
+ value = point.sum / point.count
+ elif isinstance(point, NumberDataPoint):
+ value = point.value
+ metric_point = MetricPoint(
+ name=_QUICKPULSE_METRIC_NAME_MAPPINGS[metric.name.lower()], # type: ignore
+ weight=1,
+ value=value,
+ )
+ metric_points.append(metric_point)
+ # Process filtered metrics
+ for metric in _get_metrics_from_projections():
+ metric_point = MetricPoint(
+ name=metric[0], # type: ignore
+ weight=1,
+ value=metric[1], # type: ignore
+ )
+ metric_points.append(metric_point)
+
+ # Reset projection map for next collection cycle
+ _reset_quickpulse_projection_map()
+
+ return [
+ MonitoringDataPoint(
+ version=base_monitoring_data_point.version,
+ invariant_version=base_monitoring_data_point.invariant_version,
+ instance=base_monitoring_data_point.instance,
+ role_name=base_monitoring_data_point.role_name,
+ machine_name=base_monitoring_data_point.machine_name,
+ stream_id=base_monitoring_data_point.stream_id,
+ is_web_app=base_monitoring_data_point.is_web_app,
+ performance_collection_supported=base_monitoring_data_point.performance_collection_supported,
+ timestamp=datetime.now(tz=timezone.utc),
+ metrics=metric_points,
+ documents=documents,
+ )
+ ]
+
+
+# mypy: disable-error-code="assignment,union-attr"
+def _get_span_document(data: Union[_DependencyData, _RequestData]) -> Union[RemoteDependencyDocument, RequestDocument]:
+ if isinstance(data, _DependencyData):
+ document = RemoteDependencyDocument(
+ document_type=DocumentType.REMOTE_DEPENDENCY,
+ name=data.name,
+ command_name=data.data,
+ result_code=str(data.result_code),
+ duration=_ms_to_iso8601_string(data.duration),
+ )
+ else:
+ document = RequestDocument(
+ document_type=DocumentType.REQUEST,
+ name=data.name,
+ url=data.url,
+ response_code=str(data.response_code),
+ duration=_ms_to_iso8601_string(data.duration),
+ )
+ return document
+
+
+# mypy: disable-error-code="assignment"
+def _get_log_record_document(data: Union[_ExceptionData, _TraceData], exc_type: Optional[str] = None) -> Union[ExceptionDocument, TraceDocument]: # pylint: disable=C0301
+ if isinstance(data, _ExceptionData):
+ document = ExceptionDocument(
+ document_type=DocumentType.EXCEPTION,
+ exception_type=exc_type or "",
+ exception_message=data.message,
+ )
+ else:
+ document = TraceDocument(
+ document_type=DocumentType.TRACE,
+ message=data.message,
+ )
+ return document
+
+
+# Gets filtered metrics from projections to be exported
+# Called every second on export
+def _get_metrics_from_projections() -> List[Tuple[str, float]]:
+ metrics = []
+ projection_map = _get_quickpulse_projection_map()
+ for id, projection in projection_map.items():
+ metric_value = 0
+ aggregation_type = projection[0]
+ if aggregation_type == AggregationType.MIN:
+ metric_value = 0 if projection[1] == _QUICKPULSE_PROJECTION_MAX_VALUE else projection[1]
+ elif aggregation_type == AggregationType.MAX:
+ metric_value = 0 if projection[1] == _QUICKPULSE_PROJECTION_MIN_VALUE else projection[1]
+ elif aggregation_type == AggregationType.AVG:
+ metric_value = 0 if projection[2] == 0 else projection[1] / float(projection[2])
+ elif aggregation_type == AggregationType.SUM:
+ metric_value = projection[1]
+ metrics.append((id, metric_value))
+ return metrics # type: ignore
+
+
+# Time
+
+def _ms_to_iso8601_string(ms: float) -> str:
+ seconds, ms = divmod(ms, 1000)
+ minutes, seconds = divmod(seconds, 60)
+ hours, minutes = divmod(minutes, 60)
+ days, hours = divmod(hours, 24)
+ years, days = divmod(days, 365)
+ months, days = divmod(days, 30)
+ duration = f"P{years}Y{months}M{days}DT{hours}H{minutes}M{seconds}.{int(ms):03d}S"
+ return duration
+
+
+def _filter_time_stamp_to_ms(time_stamp: str) -> Optional[int]:
+ # The service side will return a timestamp in the following format:
+ # [days].[hours]:[minutes]:[seconds]
+    # The seconds may be a whole number or fractional, e.g. 7.89 seconds translates to 7890 ms.
+ # examples: "14.6:56:7.89" = 1234567890 ms, "0.0:0:0.2" = 200 ms
+ total_milliseconds = None
+ try:
+ days_hours, minutes, seconds = time_stamp.split(":")
+ days, hours = map(float, days_hours.split("."))
+ total_milliseconds = int(
+ days * 24 * 60 * 60 * 1000 # days to milliseconds
+ + hours * 60 * 60 * 1000 # hours to milliseconds
+ + float(minutes) * 60 * 1000 # minutes to milliseconds
+ + float(seconds) * 1000 # seconds to milliseconds
+ )
+ except Exception: # pylint: disable=broad-except
+ pass
+ return total_milliseconds
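
The two time helpers above are inverses in spirit: one renders a millisecond duration as an approximate ISO 8601 duration (months and years are fixed at 30 and 365 days), and the other parses the service's [days].[hours]:[minutes]:[seconds] filter timestamps back into milliseconds. Illustrative round trips:

    from azure.monitor.opentelemetry.exporter._quickpulse._utils import (
        _filter_time_stamp_to_ms,
        _ms_to_iso8601_string,
    )

    print(_filter_time_stamp_to_ms("0.0:0:0.2"))     # 200
    print(_filter_time_stamp_to_ms("14.6:56:7.89"))  # 1234567890
    print(_filter_time_stamp_to_ms("garbage"))       # None, parse errors are swallowed
    print(_ms_to_iso8601_string(90061000))           # P0Y0M1DT1H1M1.000S
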
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_validate.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_validate.py
new file mode 100644
index 00000000..6d760117
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_quickpulse/_validate.py
@@ -0,0 +1,139 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from azure.monitor.opentelemetry.exporter._quickpulse._generated.models import (
+ DerivedMetricInfo,
+ DocumentFilterConjunctionGroupInfo,
+ FilterInfo,
+ PredicateType,
+ TelemetryType,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._types import (
+ _DEPENDENCY_DATA_FIELD_NAMES,
+ _KNOWN_STRING_FIELD_NAMES,
+ _REQUEST_DATA_FIELD_NAMES,
+)
+from azure.monitor.opentelemetry.exporter._quickpulse._utils import _filter_time_stamp_to_ms
+
+
+def _validate_derived_metric_info(metric_info: DerivedMetricInfo) -> bool:
+ if not _validate_telemetry_type(metric_info.telemetry_type):
+ return False
+ if not _validate_custom_metric_projection(metric_info):
+ return False
+ # Validate filters
+ for filter_group in metric_info.filter_groups:
+ for filter in filter_group.filters:
+ # Validate field names to telemetry type
+ # Validate predicate and comparands
+ if not _validate_filter_field_name(filter.field_name, metric_info.telemetry_type) or not \
+ _validate_filter_predicate_and_comparand(filter):
+ return False
+ return True
+
+
+def _validate_document_filter_group_info(doc_filter_group: DocumentFilterConjunctionGroupInfo) -> bool:
+ if not _validate_telemetry_type(doc_filter_group.telemetry_type):
+ return False
+ # Validate filters
+ for filter in doc_filter_group.filters.filters:
+ # Validate field names to telemetry type
+ # Validate predicate and comparands
+ if not _validate_filter_field_name(filter.field_name, doc_filter_group.telemetry_type) or not \
+ _validate_filter_predicate_and_comparand(filter):
+ return False
+ return True
+
+
+def _validate_telemetry_type(telemetry_type: str) -> bool:
+ # Validate telemetry type
+ try:
+ telemetry_type = TelemetryType(telemetry_type)
+ except Exception: # pylint: disable=broad-except
+ return False
+ # Only REQUEST, DEPENDENCY, EXCEPTION, TRACE are supported
+ # No filtering options in UX for PERFORMANCE_COUNTERS
+ if telemetry_type not in (
+ TelemetryType.REQUEST,
+ TelemetryType.DEPENDENCY,
+ TelemetryType.EXCEPTION,
+ TelemetryType.TRACE,
+ ):
+ return False
+ return True
+
+
+def _validate_custom_metric_projection(metric_info: DerivedMetricInfo) -> bool:
+ # Check for CustomMetric projection
+ if metric_info.projection and metric_info.projection.startswith("CustomMetrics."):
+ return False
+ return True
+
+
+# pylint: disable=R0911
+def _validate_filter_field_name(name: str, telemetry_type: str) -> bool:
+ if not name:
+ return False
+ if name.startswith("CustomMetrics."):
+ return False
+ if name.startswith("CustomDimensions.") or name == "*":
+ return True
+ name = name.lower()
+ if telemetry_type == TelemetryType.DEPENDENCY.value:
+ if name not in _DEPENDENCY_DATA_FIELD_NAMES:
+ return False
+ elif telemetry_type == TelemetryType.REQUEST.value:
+ if name not in _REQUEST_DATA_FIELD_NAMES:
+ return False
+ elif telemetry_type == TelemetryType.EXCEPTION.value:
+ if name not in ("exception.message", "exception.stacktrace"):
+ return False
+ elif telemetry_type == TelemetryType.TRACE.value:
+ if name != "message":
+ return False
+ else:
+ return True
+ return True
+
+
+# pylint: disable=R0911
+def _validate_filter_predicate_and_comparand(filter: FilterInfo) -> bool:
+ name = filter.field_name
+ comparand = filter.comparand
+ # Validate predicate type
+ try:
+ predicate = PredicateType(filter.predicate)
+ except Exception: # pylint: disable=broad-except
+ return False
+ if not comparand:
+ return False
+ if name == "*" and predicate not in (PredicateType.CONTAINS, PredicateType.DOES_NOT_CONTAIN):
+ return False
+ if name in ("ResultCode", "ResponseCode", "Duration"):
+ if predicate in (PredicateType.CONTAINS, PredicateType.DOES_NOT_CONTAIN):
+ return False
+ if name == "Duration":
+ # Duration comparand should be a string timestamp
+ if _filter_time_stamp_to_ms(comparand) is None:
+ return False
+ else:
+ try:
+ # Response/ResultCode comparand should be interpreted as integer
+ int(comparand)
+ except Exception: # pylint: disable=broad-except
+ return False
+ elif name == "Success":
+ if predicate not in (PredicateType.EQUAL, PredicateType.NOT_EQUAL):
+ return False
+ comparand = comparand.lower()
+ if comparand not in ("true", "false"):
+ return False
+ elif name in _KNOWN_STRING_FIELD_NAMES or name.startswith("CustomDimensions."):
+ if predicate in (
+ PredicateType.GREATER_THAN,
+ PredicateType.GREATER_THAN_OR_EQUAL,
+ PredicateType.LESS_THAN,
+ PredicateType.LESS_THAN_OR_EQUAL,
+ ):
+ return False
+ return True
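
A few hedged spot checks of the validators above, using only the simple entry points (building a full DerivedMetricInfo is skipped here since its constructor shape is not shown in this file):

    from azure.monitor.opentelemetry.exporter._quickpulse._validate import (
        _validate_filter_field_name,
        _validate_telemetry_type,
    )

    print(_validate_telemetry_type("Request"))                        # True
    print(_validate_telemetry_type("PerformanceCounter"))             # False, not filterable
    print(_validate_filter_field_name("Duration", "Request"))         # True
    print(_validate_filter_field_name("CustomMetrics.x", "Request"))  # False
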
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_storage.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_storage.py
new file mode 100644
index 00000000..efa34478
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_storage.py
@@ -0,0 +1,215 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import datetime
+import json
+import logging
+import os
+import random
+
+from azure.monitor.opentelemetry.exporter._utils import PeriodicTask
+
+logger = logging.getLogger(__name__)
+
+
+def _fmt(timestamp):
+ return timestamp.strftime("%Y-%m-%dT%H%M%S.%f")
+
+
+def _now():
+ return datetime.datetime.now(tz=datetime.timezone.utc)
+
+
+def _seconds(seconds):
+ return datetime.timedelta(seconds=seconds)
+
+
+# pylint: disable=broad-except
+class LocalFileBlob:
+ def __init__(self, fullpath):
+ self.fullpath = fullpath
+
+ def delete(self):
+ try:
+ os.remove(self.fullpath)
+ except Exception:
+ pass # keep silent
+
+ def get(self):
+ try:
+ with open(self.fullpath, "r", encoding="utf-8") as file:
+ return tuple(json.loads(line.strip()) for line in file.readlines())
+ except Exception:
+ pass # keep silent
+ return None
+
+ def put(self, data, lease_period=0):
+ try:
+ fullpath = self.fullpath + ".tmp"
+ with open(fullpath, "w", encoding="utf-8") as file:
+ for item in data:
+ file.write(json.dumps(item))
+ # The official Python doc: Do not use os.linesep as a line
+ # terminator when writing files opened in text mode (the
+ # default); use a single '\n' instead, on all platforms.
+ file.write("\n")
+ if lease_period:
+ timestamp = _now() + _seconds(lease_period)
+ self.fullpath += "@{}.lock".format(_fmt(timestamp))
+ os.rename(fullpath, self.fullpath)
+ return self
+ except Exception:
+ pass # keep silent
+ return None
+
+ def lease(self, period):
+ timestamp = _now() + _seconds(period)
+ fullpath = self.fullpath
+ if fullpath.endswith(".lock"):
+ fullpath = fullpath[: fullpath.rindex("@")]
+ fullpath += "@{}.lock".format(_fmt(timestamp))
+ try:
+ os.rename(self.fullpath, fullpath)
+ except Exception:
+ return None
+ self.fullpath = fullpath
+ return self
+
+
+# pylint: disable=broad-except
+class LocalFileStorage:
+ def __init__(
+ self,
+ path,
+ max_size=50 * 1024 * 1024, # 50MiB
+ maintenance_period=60, # 1 minute
+ retention_period=48 * 60 * 60, # 48 hours
+ write_timeout=60, # 1 minute,
+ name=None,
+ lease_period=60, # 1 minute
+ ):
+ self._path = os.path.abspath(path)
+ self._max_size = max_size
+ self._retention_period = retention_period
+ self._write_timeout = write_timeout
+ self._maintenance_routine()
+ self._maintenance_task = PeriodicTask(
+ interval=maintenance_period,
+ function=self._maintenance_routine,
+ name=name,
+ )
+ self._lease_period = lease_period
+ self._maintenance_task.daemon = True
+ self._maintenance_task.start()
+
+ def close(self):
+ self._maintenance_task.cancel()
+ self._maintenance_task.join()
+
+ def __enter__(self):
+ return self
+
+ # pylint: disable=redefined-builtin
+ def __exit__(self, type, value, traceback):
+ self.close()
+
+ def _maintenance_routine(self):
+ try:
+ # pylint: disable=unused-variable
+ for blob in self.gets():
+ pass # keep silent
+ except Exception:
+ pass # keep silent
+
+ def gets(self):
+ now = _now()
+ lease_deadline = _fmt(now)
+ retention_deadline = _fmt(now - _seconds(self._retention_period))
+ timeout_deadline = _fmt(now - _seconds(self._write_timeout))
+ try:
+ for name in sorted(os.listdir(self._path)):
+ path = os.path.join(self._path, name)
+ if not os.path.isfile(path):
+ continue # skip if not a file
+ if path.endswith(".tmp"):
+ if name < timeout_deadline:
+ try:
+ os.remove(path) # TODO: log data loss
+ except Exception:
+ pass # keep silent
+ if path.endswith(".lock"):
+ if path[path.rindex("@") + 1 : -5] > lease_deadline:
+ continue # under lease
+ new_path = path[: path.rindex("@")]
+ try:
+ os.rename(path, new_path)
+ except Exception:
+ pass # keep silent
+ path = new_path
+ if path.endswith(".blob"):
+ if name < retention_deadline:
+ try:
+ os.remove(path) # TODO: log data loss
+ except Exception:
+ pass # keep silent
+ else:
+ yield LocalFileBlob(path)
+ except Exception:
+ pass # keep silent
+
+ def get(self):
+ cursor = self.gets()
+ try:
+ return next(cursor)
+ except StopIteration:
+ pass
+ return None
+
+ def put(self, data, lease_period=None):
+ # Create path if it doesn't exist
+ try:
+ if not os.path.isdir(self._path):
+ os.makedirs(self._path, exist_ok=True)
+ except Exception:
+ pass # keep silent
+ if not self._check_storage_size():
+ return None
+ blob = LocalFileBlob(
+ os.path.join(
+ self._path,
+ "{}-{}.blob".format(
+ _fmt(_now()),
+ "{:08x}".format(random.getrandbits(32)), # thread-safe random
+ ),
+ )
+ )
+ if lease_period is None:
+ lease_period = self._lease_period
+ return blob.put(data, lease_period=lease_period)
+
+ def _check_storage_size(self):
+ size = 0
+ # pylint: disable=unused-variable
+ for dirpath, dirnames, filenames in os.walk(self._path):
+ for filename in filenames:
+ path = os.path.join(dirpath, filename)
+ # skip if it is symbolic link
+ if not os.path.islink(path):
+ try:
+ size += os.path.getsize(path)
+ except OSError:
+ logger.error(
+ "Path %s does not exist or is inaccessible.",
+ path,
+ )
+ continue
+ if size >= self._max_size:
+ # pylint: disable=logging-format-interpolation
+ logger.warning(
+ "Persistent storage max capacity has been "
+ "reached. Currently at {}KB. Telemetry will be "
+ "lost. Please consider increasing the value of "
+ "'storage_max_size' in exporter config.".format(str(size / 1024))
+ )
+ return False
+ return True
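
The storage class above implements a lease-based retry queue on the local filesystem: .tmp files are in-flight writes, .lock suffixes encode lease deadlines, and .blob files are ready to resend. A hedged usage sketch against a throwaway directory:

    import tempfile

    from azure.monitor.opentelemetry.exporter._storage import LocalFileStorage

    with LocalFileStorage(path=tempfile.mkdtemp()) as storage:
        storage.put([{"name": "demo-envelope"}], lease_period=0)  # 0 = readable at once
        blob = storage.get()           # oldest blob not currently under lease
        if blob and blob.lease(5):     # hold a 5 second lease while "sending"
            print(blob.get())          # ({'name': 'demo-envelope'},)
            blob.delete()              # drop the file after a successful send
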
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_utils.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_utils.py
new file mode 100644
index 00000000..588fe6e3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_utils.py
@@ -0,0 +1,302 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import datetime
+import locale
+from os import environ
+from os.path import isdir
+import platform
+import threading
+import time
+import warnings
+from typing import Callable, Dict, Any
+
+from opentelemetry.semconv.attributes.service_attributes import SERVICE_NAME
+from opentelemetry.semconv.resource import ResourceAttributes
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.util import ns_to_iso_str
+from opentelemetry.util.types import Attributes
+
+from azure.core.pipeline.policies import BearerTokenCredentialPolicy
+from azure.monitor.opentelemetry.exporter._generated.models import ContextTagKeys, TelemetryItem
+from azure.monitor.opentelemetry.exporter._version import VERSION as ext_version
+from azure.monitor.opentelemetry.exporter._constants import (
+ _AKS_ARM_NAMESPACE_ID,
+ _DEFAULT_AAD_SCOPE,
+ _INSTRUMENTATIONS_BIT_MAP,
+ _FUNCTIONS_WORKER_RUNTIME,
+ _PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY,
+ _WEBSITE_SITE_NAME,
+)
+
+
+opentelemetry_version = ""
+
+# Workaround for missing version file
+try:
+ from importlib.metadata import version
+
+ opentelemetry_version = version("opentelemetry-sdk")
+except ImportError:
+ # Temporary workaround for <Py3.8
+ # importlib-metadata causing issues in CI
+ import pkg_resources # type: ignore
+
+ opentelemetry_version = pkg_resources.get_distribution("opentelemetry-sdk").version
+
+
+# Azure App Service
+
+
+def _is_on_app_service():
+ return environ.get(_WEBSITE_SITE_NAME) is not None
+
+
+# Functions
+
+
+def _is_on_functions():
+ return environ.get(_FUNCTIONS_WORKER_RUNTIME) is not None
+
+
+# AKS
+
+
+def _is_on_aks():
+ return _AKS_ARM_NAMESPACE_ID in environ
+
+
+# Attach
+
+
+def _is_attach_enabled():
+ if _is_on_app_service():
+ return isdir("/agents/python/")
+ if _is_on_functions():
+ return environ.get(_PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY) == "true"
+ return False
+
+
+def _get_sdk_version_prefix():
+ sdk_version_prefix = ""
+ rp = "u"
+ if _is_on_functions():
+ rp = "f"
+ elif _is_on_app_service():
+ rp = "a"
+ # TODO: Add VM scenario outside statsbeat
+ # elif _is_on_vm():
+ # rp = 'v'
+ elif _is_on_aks():
+ rp = "k"
+
+ os = "u"
+ system = platform.system()
+ if system == "Linux":
+ os = "l"
+ elif system == "Windows":
+ os = "w"
+
+ attach_type = "m"
+ if _is_attach_enabled():
+ attach_type = "i"
+ sdk_version_prefix = "{}{}{}_".format(rp, os, attach_type)
+
+ return sdk_version_prefix
+
+
+def _get_sdk_version():
+ return "{}py{}:otel{}:ext{}".format(
+ _get_sdk_version_prefix(), platform.python_version(), opentelemetry_version, ext_version
+ )
+
+
+def _getlocale():
+ try:
+ with warnings.catch_warnings():
+ # temporary work-around for https://github.com/python/cpython/issues/82986
+ # by continuing to use getdefaultlocale() even though it has been deprecated.
+ # we ignore the deprecation warnings to reduce noise
+ warnings.simplefilter("ignore", category=DeprecationWarning)
+ return locale.getdefaultlocale()[0]
+ except AttributeError:
+        # locale.getlocale() has issues on Windows: https://github.com/python/cpython/issues/82986
+        # Use this as a fallback if locale.getdefaultlocale() doesn't exist (Py3.13+)
+ return locale.getlocale()[0]
+
+
+azure_monitor_context = {
+ ContextTagKeys.AI_DEVICE_ID: platform.node(),
+ ContextTagKeys.AI_DEVICE_LOCALE: _getlocale(),
+ ContextTagKeys.AI_DEVICE_OS_VERSION: platform.version(),
+ ContextTagKeys.AI_DEVICE_TYPE: "Other",
+ ContextTagKeys.AI_INTERNAL_SDK_VERSION: _get_sdk_version(),
+}
+
+
+def ns_to_duration(nanoseconds: int) -> str:
+ value = (nanoseconds + 500000) // 1000000 # duration in milliseconds
+ value, milliseconds = divmod(value, 1000)
+ value, seconds = divmod(value, 60)
+ value, minutes = divmod(value, 60)
+ days, hours = divmod(value, 24)
+ return "{:d}.{:02d}:{:02d}:{:02d}.{:03d}".format(days, hours, minutes, seconds, milliseconds)
+
+
+# Replicate .netDateTime.Ticks(), which is the UTC time, expressed as the number
+# of 100-nanosecond intervals that have elapsed since 12:00:00 midnight on
+# January 1, 0001.
+def _ticks_since_dot_net_epoch():
+ # Since time.time() is the elapsed time since UTC January 1, 1970, we have
+ # to shift this start time, and then multiply by 10^7 to get the number of
+ # 100-nanosecond intervals
+ shift_time = int((datetime.datetime(1970, 1, 1, 0, 0, 0) - datetime.datetime(1, 1, 1, 0, 0, 0)).total_seconds()) * (
+ 10**7
+ )
+ # Add shift time to 100-ns intervals since time.time()
+ return int(time.time() * (10**7)) + shift_time
+
+
+_INSTRUMENTATIONS_BIT_MASK = 0
+_INSTRUMENTATIONS_BIT_MASK_LOCK = threading.Lock()
+
+
+def get_instrumentations():
+ return _INSTRUMENTATIONS_BIT_MASK
+
+
+def add_instrumentation(instrumentation_name: str):
+ with _INSTRUMENTATIONS_BIT_MASK_LOCK:
+ global _INSTRUMENTATIONS_BIT_MASK # pylint: disable=global-statement
+ instrumentation_bits = _INSTRUMENTATIONS_BIT_MAP.get(instrumentation_name, 0)
+ _INSTRUMENTATIONS_BIT_MASK |= instrumentation_bits
+
+
+def remove_instrumentation(instrumentation_name: str):
+ with _INSTRUMENTATIONS_BIT_MASK_LOCK:
+ global _INSTRUMENTATIONS_BIT_MASK # pylint: disable=global-statement
+ instrumentation_bits = _INSTRUMENTATIONS_BIT_MAP.get(instrumentation_name, 0)
+ _INSTRUMENTATIONS_BIT_MASK &= ~instrumentation_bits
+
+
+class PeriodicTask(threading.Thread):
+ """Thread that periodically calls a given function.
+
+ :type interval: int or float
+ :param interval: Seconds between calls to the function.
+
+ :type function: function
+ :param function: The function to call.
+
+ :type args: list
+ :param args: The args passed in while calling `function`.
+
+ :type kwargs: dict
+    :param kwargs: The kwargs passed in while calling `function`.
+ """
+
+ def __init__(self, interval: int, function: Callable, *args: Any, **kwargs: Any):
+ super().__init__(name=kwargs.pop("name", None))
+ self.interval = interval
+ self.function = function
+ self.args = args or [] # type: ignore
+ self.kwargs = kwargs or {}
+ self.finished = threading.Event()
+
+ def run(self):
+ wait_time = self.interval
+ while not self.finished.wait(wait_time):
+ start_time = time.time()
+ self.function(*self.args, **self.kwargs)
+ elapsed_time = time.time() - start_time
+ wait_time = max(self.interval - elapsed_time, 0)
+
+ def cancel(self):
+ self.finished.set()
+
+
+def _create_telemetry_item(timestamp: int) -> TelemetryItem:
+ return TelemetryItem(
+ name="",
+ instrumentation_key="",
+ tags=dict(azure_monitor_context), # type: ignore
+ time=ns_to_iso_str(timestamp), # type: ignore
+ )
+
+
+def _populate_part_a_fields(resource: Resource):
+ tags = {}
+ if resource and resource.attributes:
+ service_name = resource.attributes.get(SERVICE_NAME)
+ service_namespace = resource.attributes.get(ResourceAttributes.SERVICE_NAMESPACE)
+ service_instance_id = resource.attributes.get(ResourceAttributes.SERVICE_INSTANCE_ID)
+ device_id = resource.attributes.get(ResourceAttributes.DEVICE_ID)
+ device_model = resource.attributes.get(ResourceAttributes.DEVICE_MODEL_NAME)
+ device_make = resource.attributes.get(ResourceAttributes.DEVICE_MANUFACTURER)
+ app_version = resource.attributes.get(ResourceAttributes.SERVICE_VERSION)
+ if service_name:
+ if service_namespace:
+ tags[ContextTagKeys.AI_CLOUD_ROLE] = str(service_namespace) + "." + str(service_name)
+ else:
+ tags[ContextTagKeys.AI_CLOUD_ROLE] = service_name # type: ignore
+ if service_instance_id:
+ tags[ContextTagKeys.AI_CLOUD_ROLE_INSTANCE] = service_instance_id # type: ignore
+ else:
+ tags[ContextTagKeys.AI_CLOUD_ROLE_INSTANCE] = platform.node() # hostname default
+ tags[ContextTagKeys.AI_INTERNAL_NODE_NAME] = tags[ContextTagKeys.AI_CLOUD_ROLE_INSTANCE]
+ if device_id:
+ tags[ContextTagKeys.AI_DEVICE_ID] = device_id # type: ignore
+ if device_model:
+ tags[ContextTagKeys.AI_DEVICE_MODEL] = device_model # type: ignore
+ if device_make:
+ tags[ContextTagKeys.AI_DEVICE_OEM_NAME] = device_make # type: ignore
+ if app_version:
+ tags[ContextTagKeys.AI_APPLICATION_VER] = app_version # type: ignore
+
+ return tags
+
+
+# pylint: disable=W0622
+def _filter_custom_properties(properties: Attributes, filter=None) -> Dict[str, str]:
+ truncated_properties: Dict[str, str] = {}
+ if not properties:
+ return truncated_properties
+ for key, val in properties.items():
+ # Apply filter function
+ if filter is not None:
+ if not filter(key, val):
+ continue
+ # Apply truncation rules
+ # Max key length is 150, value is 8192
+ if not key or len(key) > 150 or val is None:
+ continue
+ truncated_properties[key] = str(val)[:8192]
+ return truncated_properties
+
+
+def _get_auth_policy(credential, default_auth_policy, aad_audience=None):
+ if credential:
+ if hasattr(credential, "get_token"):
+ return BearerTokenCredentialPolicy(
+ credential,
+ _get_scope(aad_audience),
+ )
+ raise ValueError("Must pass in valid TokenCredential.")
+ return default_auth_policy
+
+
+def _get_scope(aad_audience=None):
+ # The AUDIENCE is a url that identifies Azure Monitor in a specific cloud
+ # (For example: "https://monitor.azure.com/").
+ # The SCOPE is the audience + the permission
+ # (For example: "https://monitor.azure.com//.default").
+ return _DEFAULT_AAD_SCOPE if not aad_audience else "{}/.default".format(aad_audience)
+
+
+class Singleton(type):
+ _instance = None
+
+ def __call__(cls, *args, **kwargs):
+ if not cls._instance:
+ cls._instance = super(Singleton, cls).__call__(*args, **kwargs)
+ return cls._instance
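
Two of the utilities above are generic enough to demonstrate in isolation. PeriodicTask waits one full interval before the first call and subtracts the call's own runtime from subsequent waits; the Singleton metaclass memoises one instance per class. A minimal sketch (the Config class is hypothetical):

    import time

    from azure.monitor.opentelemetry.exporter._utils import PeriodicTask, Singleton

    task = PeriodicTask(interval=0.1, function=lambda: print("tick"))
    task.daemon = True
    task.start()
    time.sleep(0.35)  # roughly three ticks, since the first fires after one interval
    task.cancel()
    task.join()

    class Config(metaclass=Singleton):
        def __init__(self):
            self.endpoint = "https://example.invalid"  # hypothetical attribute

    assert Config() is Config()  # every call returns the same instance
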
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_version.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_version.py
new file mode 100644
index 00000000..068e1fe2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/_version.py
@@ -0,0 +1,8 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+VERSION = "1.0.0b35"
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/_base.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/_base.py
new file mode 100644
index 00000000..d3da36ba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/_base.py
@@ -0,0 +1,435 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import logging
+import os
+import tempfile
+import time
+from enum import Enum
+from typing import List, Optional, Any
+from urllib.parse import urlparse
+
+from azure.core.exceptions import HttpResponseError, ServiceRequestError
+from azure.core.pipeline.policies import (
+ ContentDecodePolicy,
+ HttpLoggingPolicy,
+ RedirectPolicy,
+ RequestIdPolicy,
+)
+from azure.monitor.opentelemetry.exporter._generated import AzureMonitorClient
+from azure.monitor.opentelemetry.exporter._generated._configuration import AzureMonitorClientConfiguration
+from azure.monitor.opentelemetry.exporter._generated.models import (
+ MessageData,
+ MetricsData,
+ MonitorDomain,
+ RemoteDependencyData,
+ RequestData,
+ TelemetryEventData,
+ TelemetryExceptionData,
+ TelemetryItem,
+)
+from azure.monitor.opentelemetry.exporter._constants import (
+ _AZURE_MONITOR_DISTRO_VERSION_ARG,
+ _INVALID_STATUS_CODES,
+ _REACHED_INGESTION_STATUS_CODES,
+ _REDIRECT_STATUS_CODES,
+ _REQ_DURATION_NAME,
+ _REQ_EXCEPTION_NAME,
+ _REQ_FAILURE_NAME,
+ _REQ_RETRY_NAME,
+ _REQ_SUCCESS_NAME,
+ _REQ_THROTTLE_NAME,
+ _RETRYABLE_STATUS_CODES,
+ _THROTTLE_STATUS_CODES,
+)
+from azure.monitor.opentelemetry.exporter._connection_string_parser import ConnectionStringParser
+from azure.monitor.opentelemetry.exporter._storage import LocalFileStorage
+from azure.monitor.opentelemetry.exporter._utils import _get_auth_policy
+from azure.monitor.opentelemetry.exporter.statsbeat._state import (
+ get_statsbeat_initial_success,
+ get_statsbeat_shutdown,
+ increment_and_check_statsbeat_failure_count,
+ is_statsbeat_enabled,
+ set_statsbeat_initial_success,
+)
+from azure.monitor.opentelemetry.exporter.statsbeat._utils import _update_requests_map
+
+logger = logging.getLogger(__name__)
+
+_AZURE_TEMPDIR_PREFIX = "Microsoft/AzureMonitor"
+_TEMPDIR_PREFIX = "opentelemetry-python-"
+_SERVICE_API_LATEST = "2020-09-15_Preview"
+
+
+class ExportResult(Enum):
+ SUCCESS = 0
+ FAILED_RETRYABLE = 1
+ FAILED_NOT_RETRYABLE = 2
+
+
+# pylint: disable=broad-except
+# pylint: disable=too-many-instance-attributes
+# pylint: disable=C0301
+class BaseExporter:
+ """Azure Monitor base exporter for OpenTelemetry."""
+
+ def __init__(self, **kwargs: Any) -> None:
+ """Azure Monitor base exporter for OpenTelemetry.
+
+ :keyword str api_version: The service API version used. Defaults to latest.
+ :keyword str connection_string: The connection string used for your Application Insights resource.
+ :keyword ManagedIdentityCredential/ClientSecretCredential credential: Token credential, such as ManagedIdentityCredential or ClientSecretCredential, used for Azure Active Directory (AAD) authentication. Defaults to None.
+ :keyword bool disable_offline_storage: Determines whether to disable storing failed telemetry records for retry. Defaults to `False`.
+ :keyword str storage_directory: Storage path in which to store retry files. Defaults to `<tempfile.gettempdir()>/opentelemetry-python-<your-instrumentation-key>`.
+ :rtype: None
+ """
+ parsed_connection_string = ConnectionStringParser(kwargs.get("connection_string"))
+
+ self._api_version = kwargs.get("api_version") or _SERVICE_API_LATEST
+ self._credential = kwargs.get("credential")
+ self._consecutive_redirects = 0 # To prevent circular redirects
+ self._disable_offline_storage = kwargs.get("disable_offline_storage", False)
+ self._endpoint = parsed_connection_string.endpoint
+ self._instrumentation_key = parsed_connection_string.instrumentation_key
+ self._aad_audience = parsed_connection_string.aad_audience
+ self._storage_maintenance_period = kwargs.get(
+ "storage_maintenance_period", 60
+ ) # Maintenance interval in seconds.
+ self._storage_max_size = kwargs.get(
+ "storage_max_size", 50 * 1024 * 1024
+ ) # Maximum size in bytes (default 50MiB)
+ self._storage_min_retry_interval = kwargs.get(
+ "storage_min_retry_interval", 60
+ ) # minimum retry interval in seconds
+ temp_suffix = self._instrumentation_key or ""
+ if "storage_directory" in kwargs:
+ self._storage_directory = kwargs.get("storage_directory")
+ elif not self._disable_offline_storage:
+ self._storage_directory = os.path.join(
+ tempfile.gettempdir(), _AZURE_TEMPDIR_PREFIX, _TEMPDIR_PREFIX + temp_suffix
+ )
+ else:
+ self._storage_directory = None
+ self._storage_retention_period = kwargs.get(
+ "storage_retention_period", 48 * 60 * 60
+ ) # Retention period in seconds (default 48 hrs)
+ self._timeout = kwargs.get("timeout", 10.0) # networking timeout in seconds
+ self._distro_version = kwargs.get(
+ _AZURE_MONITOR_DISTRO_VERSION_ARG, ""
+        )  # If set, indicates the exporter was instantiated via the Azure Monitor OpenTelemetry distro. The value corresponds to the distro version.
+
+ config = AzureMonitorClientConfiguration(self._endpoint, **kwargs)
+ policies = [
+ RequestIdPolicy(**kwargs),
+ config.headers_policy,
+ config.user_agent_policy,
+ config.proxy_policy,
+ ContentDecodePolicy(**kwargs),
+ # Handle redirects in exporter, set new endpoint if redirected
+ RedirectPolicy(permit_redirects=False),
+ config.retry_policy,
+ _get_auth_policy(self._credential, config.authentication_policy, self._aad_audience),
+ config.custom_hook_policy,
+ config.logging_policy,
+ # Explicitly disabling to avoid infinite loop of Span creation when data is exported
+ # DistributedTracingPolicy(**kwargs),
+ config.http_logging_policy or HttpLoggingPolicy(**kwargs),
+ ]
+
+ self.client = AzureMonitorClient(
+ host=self._endpoint, connection_timeout=self._timeout, policies=policies, **kwargs
+ )
+ self.storage = None
+ if not self._disable_offline_storage:
+ self.storage = LocalFileStorage(
+ path=self._storage_directory,
+ max_size=self._storage_max_size,
+ maintenance_period=self._storage_maintenance_period,
+ retention_period=self._storage_retention_period,
+ name="{} Storage".format(self.__class__.__name__),
+ lease_period=self._storage_min_retry_interval,
+ )
+ # specifies whether current exporter is used for collection of instrumentation metrics
+ self._instrumentation_collection = kwargs.get("instrumentation_collection", False)
+ # statsbeat initialization
+ if self._should_collect_stats():
+ # Import here to avoid circular dependencies
+ from azure.monitor.opentelemetry.exporter.statsbeat._statsbeat import collect_statsbeat_metrics
+
+ collect_statsbeat_metrics(self)
+
+ def _transmit_from_storage(self) -> None:
+ if not self.storage:
+ return
+ for blob in self.storage.gets():
+ # give a few more seconds for blob lease operation
+ # to reduce the chance of race (for perf consideration)
+ if blob.lease(self._timeout + 5):
+ envelopes = [_format_storage_telemetry_item(TelemetryItem.from_dict(x)) for x in blob.get()]
+ result = self._transmit(envelopes)
+ if result == ExportResult.FAILED_RETRYABLE:
+ blob.lease(1)
+ else:
+ blob.delete()
+
+ def _handle_transmit_from_storage(self, envelopes: List[TelemetryItem], result: ExportResult) -> None:
+ if self.storage:
+ if result == ExportResult.FAILED_RETRYABLE:
+ envelopes_to_store = [x.as_dict() for x in envelopes]
+ self.storage.put(envelopes_to_store)
+ elif result == ExportResult.SUCCESS:
+ # Try to send any cached events
+ self._transmit_from_storage()
+
+ # pylint: disable=too-many-branches
+ # pylint: disable=too-many-nested-blocks
+ # pylint: disable=too-many-statements
+ def _transmit(self, envelopes: List[TelemetryItem]) -> ExportResult:
+ """
+ Transmit the data envelopes to the ingestion service.
+
+ Returns an ExportResult, this function should never
+ throw an exception.
+ :param envelopes: The list of telemetry items to transmit.
+ :type envelopes: list of ~azure.monitor.opentelemetry.exporter._generated.models.TelemetryItem
+ :return: The result of the export.
+ :rtype: ~azure.monitor.opentelemetry.exporter.export._base._ExportResult
+ """
+ if len(envelopes) > 0:
+ result = ExportResult.SUCCESS
+ # Track whether or not exporter has successfully reached ingestion
+ # Currently only used for statsbeat exporter to detect shutdown cases
+ reach_ingestion = False
+ start_time = time.time()
+ try:
+ track_response = self.client.track(envelopes)
+ if not track_response.errors: # 200
+ self._consecutive_redirects = 0
+ if not self._is_stats_exporter():
+ logger.info(
+ "Transmission succeeded: Item received: %s. Items accepted: %s",
+ track_response.items_received,
+ track_response.items_accepted,
+ )
+ if self._should_collect_stats():
+ _update_requests_map(_REQ_SUCCESS_NAME[1], 1)
+ reach_ingestion = True
+ result = ExportResult.SUCCESS
+ else: # 206
+ reach_ingestion = True
+ resend_envelopes = []
+ for error in track_response.errors:
+ if _is_retryable_code(error.status_code):
+ resend_envelopes.append(envelopes[error.index]) # type: ignore
+ else:
+ if not self._is_stats_exporter():
+ logger.error(
+ "Data drop %s: %s %s.",
+ error.status_code,
+ error.message,
+ envelopes[error.index] if error.index is not None else "",
+ )
+ if self.storage and resend_envelopes:
+ envelopes_to_store = [x.as_dict() for x in resend_envelopes]
+ self.storage.put(envelopes_to_store, 0)
+ self._consecutive_redirects = 0
+ # Mark as not retryable because we already write to storage here
+ result = ExportResult.FAILED_NOT_RETRYABLE
+ except HttpResponseError as response_error:
+ # HttpResponseError is raised when a response is received
+ if _reached_ingestion_code(response_error.status_code):
+ reach_ingestion = True
+ if _is_retryable_code(response_error.status_code):
+ if self._should_collect_stats():
+ _update_requests_map(_REQ_RETRY_NAME[1], value=response_error.status_code)
+ result = ExportResult.FAILED_RETRYABLE
+ elif _is_throttle_code(response_error.status_code):
+ if self._should_collect_stats():
+ _update_requests_map(_REQ_THROTTLE_NAME[1], value=response_error.status_code)
+ result = ExportResult.FAILED_NOT_RETRYABLE
+ elif _is_redirect_code(response_error.status_code):
+ self._consecutive_redirects = self._consecutive_redirects + 1
+ # pylint: disable=W0212
+ if self._consecutive_redirects < self.client._config.redirect_policy.max_redirects: # type: ignore
+ if response_error.response and response_error.response.headers: # type: ignore
+ redirect_has_headers = True
+ location = response_error.response.headers.get("location") # type: ignore
+ url = urlparse(location)
+ else:
+ redirect_has_headers = False
+ if redirect_has_headers and url.scheme and url.netloc: # pylint: disable=E0606
+ # Change the host to the new redirected host
+ self.client._config.host = "{}://{}".format(url.scheme, url.netloc) # pylint: disable=W0212
+ # Attempt to export again
+ result = self._transmit(envelopes)
+ else:
+ if not self._is_stats_exporter():
+ logger.error(
+ "Error parsing redirect information.",
+ )
+ result = ExportResult.FAILED_NOT_RETRYABLE
+ else:
+ if not self._is_stats_exporter():
+ logger.error(
+ "Error sending telemetry because of circular redirects. "
+ "Please check the integrity of your connection string."
+ )
+ # If redirect but did not return, exception occurred
+ if self._should_collect_stats():
+ _update_requests_map(_REQ_EXCEPTION_NAME[1], value="Circular Redirect")
+ result = ExportResult.FAILED_NOT_RETRYABLE
+ else:
+ # Any other status code counts as failure (non-retryable)
+ # 400 - Invalid - The server cannot or will not process the request due to the invalid telemetry (invalid data, iKey, etc.)
+ # 404 - Ingestion is allowed only from stamp specific endpoint - must update connection string
+ if self._should_collect_stats():
+ _update_requests_map(_REQ_FAILURE_NAME[1], value=response_error.status_code)
+ if not self._is_stats_exporter():
+ logger.error(
+ "Non-retryable server side error: %s.",
+ response_error.message,
+ )
+ if _is_invalid_code(response_error.status_code):
+ # Shutdown statsbeat on invalid code from customer endpoint
+ # Import here to avoid circular dependencies
+ from azure.monitor.opentelemetry.exporter.statsbeat._statsbeat import (
+ shutdown_statsbeat_metrics,
+ )
+
+ shutdown_statsbeat_metrics()
+ result = ExportResult.FAILED_NOT_RETRYABLE
+ except ServiceRequestError as request_error:
+ # Errors when we're fairly sure that the server did not receive the
+ # request, so it should be safe to retry.
+ # ServiceRequestError is raised by azure.core for these cases
+ logger.warning("Retrying due to server request error: %s.", request_error.message)
+ if self._should_collect_stats():
+ exc_type = request_error.exc_type
+ if exc_type is None or exc_type is type(None):
+ exc_type = request_error.__class__.__name__ # type: ignore
+ _update_requests_map(_REQ_EXCEPTION_NAME[1], value=exc_type)
+ result = ExportResult.FAILED_RETRYABLE
+ except Exception as ex:
+                logger.exception("Envelopes could not be exported and are not retryable: %s.", ex)
+ if self._should_collect_stats():
+ _update_requests_map(_REQ_EXCEPTION_NAME[1], value=ex.__class__.__name__)
+ result = ExportResult.FAILED_NOT_RETRYABLE
+ finally:
+ if self._should_collect_stats():
+ end_time = time.time()
+ _update_requests_map("count", 1)
+ _update_requests_map(_REQ_DURATION_NAME[1], value=end_time - start_time)
+ if self._is_statsbeat_initializing_state():
+ # Update statsbeat initial success state if reached ingestion
+ if reach_ingestion:
+ set_statsbeat_initial_success(True)
+ else:
+                    # If ingestion was not reached, increment the failure count and check
+                    # whether the failure threshold was exceeded while statsbeat was initializing
+ if increment_and_check_statsbeat_failure_count():
+ # Import here to avoid circular dependencies
+ from azure.monitor.opentelemetry.exporter.statsbeat._statsbeat import (
+ shutdown_statsbeat_metrics,
+ )
+
+ shutdown_statsbeat_metrics()
+ # pylint: disable=lost-exception
+ return ExportResult.FAILED_NOT_RETRYABLE # pylint: disable=W0134
+ # pylint: disable=lost-exception
+ return result # pylint: disable=W0134
+
+ # No spans to export
+ self._consecutive_redirects = 0
+ return ExportResult.SUCCESS
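+
+    # Summary of _transmit outcomes (a reading aid, not exhaustive; the exact
+    # status-code sets live in _constants.py):
+    #   200, no errors      -> ExportResult.SUCCESS
+    #   206 partial success -> retryable items written to local storage, then
+    #                          FAILED_NOT_RETRYABLE (already persisted)
+    #   retryable codes     -> FAILED_RETRYABLE (caller may persist and retry)
+    #   throttle/invalid    -> FAILED_NOT_RETRYABLE
+    #   redirect codes      -> host updated and transmit retried, bounded by max_redirects
+    #   ServiceRequestError -> FAILED_RETRYABLE (server likely never saw the request)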
+
+    # Check whether statsbeat metrics should be collected for this export
+ def _should_collect_stats(self):
+ return (
+ is_statsbeat_enabled()
+ and not get_statsbeat_shutdown()
+ and not self._is_stats_exporter()
+ and not self._instrumentation_collection
+ )
+
+    # Check whether statsbeat is still attempting its first successful transmission (initialization)
+ def _is_statsbeat_initializing_state(self):
+ return self._is_stats_exporter() and not get_statsbeat_shutdown() and not get_statsbeat_initial_success()
+
+ def _is_stats_exporter(self):
+ return self.__class__.__name__ == "_StatsBeatExporter"
+
+
+def _is_invalid_code(response_code: Optional[int]) -> bool:
+    """Determine if the response is an invalid response.
+
+    :param int response_code: HTTP response code
+    :return: True if the response is an invalid response
+ :rtype: bool
+ """
+ return response_code in _INVALID_STATUS_CODES
+
+
+def _is_redirect_code(response_code: Optional[int]) -> bool:
+ """Determine if response is a redirect response.
+
+ :param int response_code: HTTP response code
+ :return: True if response is a redirect response
+ :rtype: bool
+ """
+ return response_code in _REDIRECT_STATUS_CODES
+
+
+def _is_retryable_code(response_code: Optional[int]) -> bool:
+ """Determine if response is retryable.
+
+ :param int response_code: HTTP response code
+ :return: True if response is retryable
+ :rtype: bool
+ """
+ return response_code in _RETRYABLE_STATUS_CODES
+
+
+def _is_throttle_code(response_code: Optional[int]) -> bool:
+    """Determine if the response is a throttle response.
+
+    :param int response_code: HTTP response code
+    :return: True if the response is a throttle response
+ :rtype: bool
+ """
+ return response_code in _THROTTLE_STATUS_CODES
+
+
+def _reached_ingestion_code(response_code: Optional[int]) -> bool:
+ """Determine if response indicates ingestion service has been reached.
+
+ :param int response_code: HTTP response code
+ :return: True if response indicates ingestion service has been reached
+ :rtype: bool
+ """
+ return response_code in _REACHED_INGESTION_STATUS_CODES
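+
+
+# Illustrative only: the status-code sets used by the helpers above are defined
+# in azure.monitor.opentelemetry.exporter._constants. As a rough sketch (values
+# assumed from typical ingestion behavior; the constants file is authoritative):
+#
+#     _REDIRECT_STATUS_CODES: 307, 308
+#     _RETRYABLE_STATUS_CODES: e.g. 401, 403, 408, 429, 500, 502, 503, 504
+#     _THROTTLE_STATUS_CODES: e.g. 402, 439 (quota exceeded)
+#     _INVALID_STATUS_CODES: e.g. 400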
+
+
+_MONITOR_DOMAIN_MAPPING = {
+ "EventData": TelemetryEventData,
+ "ExceptionData": TelemetryExceptionData,
+ "MessageData": MessageData,
+ "MetricData": MetricsData,
+ "RemoteDependencyData": RemoteDependencyData,
+ "RequestData": RequestData,
+}
+
+
+# TelemetryItem.from_dict() deserializes incompletely; this helper restores the
+# TelemetryItem to its correct shape after it is called
+def _format_storage_telemetry_item(item: TelemetryItem) -> TelemetryItem:
+ # After TelemetryItem.from_dict, all base_data fields are stored in
+ # additional_properties as a dict instead of in item.data.base_data itself
+ # item.data.base_data is also of type MonitorDomain instead of a child class
+ if hasattr(item, "data") and item.data is not None:
+ if hasattr(item.data, "base_data") and isinstance(item.data.base_data, MonitorDomain):
+ if hasattr(item.data, "base_type") and isinstance(item.data.base_type, str):
+ base_type = _MONITOR_DOMAIN_MAPPING.get(item.data.base_type)
+ # Apply deserialization of additional_properties and store that as base_data
+ if base_type:
+ item.data.base_data = base_type.from_dict(item.data.base_data.additional_properties) # type: ignore
+ item.data.base_data.additional_properties = None # type: ignore
+ return item
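+
+
+# A minimal sketch of the storage round-trip this helper exists for (names are
+# illustrative):
+#
+#     stored = item.as_dict()                           # persisted to local storage
+#     restored = TelemetryItem.from_dict(stored)        # base_data comes back as a bare MonitorDomain
+#     fixed = _format_storage_telemetry_item(restored)  # base_data is a concrete subclass again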
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/logs/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/logs/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/logs/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/logs/_exporter.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/logs/_exporter.py
new file mode 100644
index 00000000..e7ae4e89
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/logs/_exporter.py
@@ -0,0 +1,244 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import json
+import logging
+from typing import Optional, Sequence, Any
+
+from opentelemetry._logs.severity import SeverityNumber
+from opentelemetry.semconv.attributes.exception_attributes import (
+ EXCEPTION_ESCAPED,
+ EXCEPTION_MESSAGE,
+ EXCEPTION_STACKTRACE,
+ EXCEPTION_TYPE,
+)
+from opentelemetry.sdk._logs import LogData
+from opentelemetry.sdk._logs.export import LogExporter, LogExportResult
+
+from azure.monitor.opentelemetry.exporter import _utils
+from azure.monitor.opentelemetry.exporter._constants import (
+ _EXCEPTION_ENVELOPE_NAME,
+ _MESSAGE_ENVELOPE_NAME,
+)
+from azure.monitor.opentelemetry.exporter._generated.models import (
+ ContextTagKeys,
+ MessageData,
+ MonitorBase,
+ TelemetryEventData,
+ TelemetryExceptionData,
+ TelemetryExceptionDetails,
+ TelemetryItem,
+)
+from azure.monitor.opentelemetry.exporter.export._base import (
+ BaseExporter,
+ ExportResult,
+)
+from azure.monitor.opentelemetry.exporter.export.trace import _utils as trace_utils
+from azure.monitor.opentelemetry.exporter._constants import (
+ _APPLICATION_INSIGHTS_EVENT_MARKER_ATTRIBUTE,
+ _MICROSOFT_CUSTOM_EVENT_NAME,
+)
+from azure.monitor.opentelemetry.exporter.statsbeat._state import (
+ get_statsbeat_shutdown,
+ get_statsbeat_custom_events_feature_set,
+ is_statsbeat_enabled,
+ set_statsbeat_custom_events_feature_set,
+)
+
+_logger = logging.getLogger(__name__)
+
+_DEFAULT_SPAN_ID = 0
+_DEFAULT_TRACE_ID = 0
+
+__all__ = ["AzureMonitorLogExporter"]
+
+
+class AzureMonitorLogExporter(BaseExporter, LogExporter):
+ """Azure Monitor Log exporter for OpenTelemetry."""
+
+ def export(self, batch: Sequence[LogData], **kwargs: Any) -> LogExportResult: # pylint: disable=unused-argument
+ """Export log data.
+
+ :param batch: OpenTelemetry LogData(s) to export.
+        :type batch: ~typing.Sequence[~opentelemetry.sdk._logs.LogData]
+        :return: The result of the export.
+        :rtype: ~opentelemetry.sdk._logs.export.LogExportResult
+ """
+ envelopes = [self._log_to_envelope(log) for log in batch]
+ try:
+ result = self._transmit(envelopes)
+ self._handle_transmit_from_storage(envelopes, result)
+ return _get_log_export_result(result)
+ except Exception: # pylint: disable=broad-except
+ _logger.exception("Exception occurred while exporting the data.")
+ return _get_log_export_result(ExportResult.FAILED_NOT_RETRYABLE)
+
+ def shutdown(self) -> None:
+ """Shuts down the exporter.
+
+ Called when the SDK is shut down.
+ """
+ if self.storage:
+ self.storage.close()
+
+ def _log_to_envelope(self, log_data: LogData) -> TelemetryItem:
+ envelope = _convert_log_to_envelope(log_data)
+ envelope.instrumentation_key = self._instrumentation_key
+ return envelope
+
+ # pylint: disable=docstring-keyword-should-match-keyword-only
+ @classmethod
+ def from_connection_string(cls, conn_str: str, **kwargs: Any) -> "AzureMonitorLogExporter":
+ """
+ Create an AzureMonitorLogExporter from a connection string. This is the
+ recommended way of instantiation if a connection string is passed in
+ explicitly. If a user wants to use a connection string provided by
+ environment variable, the constructor of the exporter can be called
+ directly.
+
+ :param str conn_str: The connection string to be used for
+ authentication.
+ :keyword str api_version: The service API version used. Defaults to
+ latest.
+ :return: an instance of ~AzureMonitorLogExporter
+ :rtype: ~azure.monitor.opentelemetry.exporter.AzureMonitorLogExporter
+ """
+ return cls(connection_string=conn_str, **kwargs)
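+
+    # Usage sketch (the connection string below is a placeholder, not a real key):
+    #
+    #     exporter = AzureMonitorLogExporter.from_connection_string(
+    #         "InstrumentationKey=00000000-0000-0000-0000-000000000000"
+    #     )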
+
+
+def _log_data_is_event(log_data: LogData) -> bool:
+ log_record = log_data.log_record
+ is_event = None
+ if log_record.attributes:
+ is_event = log_record.attributes.get(_MICROSOFT_CUSTOM_EVENT_NAME) or \
+ log_record.attributes.get(_APPLICATION_INSIGHTS_EVENT_MARKER_ATTRIBUTE) # type: ignore
+ return is_event is not None
+
+
+# pylint: disable=protected-access
+def _convert_log_to_envelope(log_data: LogData) -> TelemetryItem:
+ log_record = log_data.log_record
+ time_stamp = log_record.timestamp if log_record.timestamp is not None else log_record.observed_timestamp
+ envelope = _utils._create_telemetry_item(time_stamp)
+ envelope.tags.update(_utils._populate_part_a_fields(log_record.resource)) # type: ignore
+ envelope.tags[ContextTagKeys.AI_OPERATION_ID] = "{:032x}".format( # type: ignore
+ log_record.trace_id or _DEFAULT_TRACE_ID
+ )
+ envelope.tags[ContextTagKeys.AI_OPERATION_PARENT_ID] = "{:016x}".format( # type: ignore
+ log_record.span_id or _DEFAULT_SPAN_ID
+ )
+    # Special use case: customers want to be able to set the client IP (location ip) on log records
+ location_ip = trace_utils._get_location_ip(log_record.attributes)
+ if location_ip:
+ envelope.tags[ContextTagKeys.AI_LOCATION_IP] = location_ip # type: ignore
+ properties = _utils._filter_custom_properties(
+ log_record.attributes, lambda key, val: not _is_ignored_attribute(key)
+ )
+ exc_type = exc_message = stack_trace = None
+ if log_record.attributes:
+ exc_type = log_record.attributes.get(EXCEPTION_TYPE)
+ exc_message = log_record.attributes.get(EXCEPTION_MESSAGE)
+ stack_trace = log_record.attributes.get(EXCEPTION_STACKTRACE)
+ severity_level = _get_severity_level(log_record.severity_number)
+
+ # Exception telemetry
+ if exc_type is not None or exc_message is not None:
+ envelope.name = _EXCEPTION_ENVELOPE_NAME
+ has_full_stack = stack_trace is not None
+ if not exc_type:
+ exc_type = "Exception"
+ # Log body takes priority for message
+ if log_record.body:
+ message = _map_body_to_message(log_record.body)
+ elif exc_message:
+ message = exc_message # type: ignore
+ else:
+ message = "Exception"
+ exc_details = TelemetryExceptionDetails(
+ type_name=str(exc_type)[:1024], # type: ignore
+ message=str(message)[:32768],
+ has_full_stack=has_full_stack,
+ stack=str(stack_trace)[:32768],
+ )
+ data = TelemetryExceptionData( # type: ignore
+ severity_level=severity_level, # type: ignore
+ properties=properties,
+ exceptions=[exc_details],
+ )
+ envelope.data = MonitorBase(base_data=data, base_type="ExceptionData")
+ elif _log_data_is_event(log_data): # Event telemetry
+ _set_statsbeat_custom_events_feature()
+ envelope.name = "Microsoft.ApplicationInsights.Event"
+ event_name = ""
+ if log_record.attributes.get(_MICROSOFT_CUSTOM_EVENT_NAME): # type: ignore
+ event_name = str(log_record.attributes.get(_MICROSOFT_CUSTOM_EVENT_NAME)) # type: ignore
+ else:
+ event_name = _map_body_to_message(log_record.body)
+ data = TelemetryEventData( # type: ignore
+ name=event_name,
+ properties=properties,
+ )
+ envelope.data = MonitorBase(base_data=data, base_type="EventData")
+ else: # Message telemetry
+ envelope.name = _MESSAGE_ENVELOPE_NAME
+ # pylint: disable=line-too-long
+ # Severity number: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/logs/data-model.md#field-severitynumber
+ data = MessageData( # type: ignore
+ message=_map_body_to_message(log_record.body),
+ severity_level=severity_level, # type: ignore
+ properties=properties,
+ )
+ envelope.data = MonitorBase(base_data=data, base_type="MessageData")
+
+ return envelope
+
+
+def _get_log_export_result(result: ExportResult) -> LogExportResult:
+ if result == ExportResult.SUCCESS:
+ return LogExportResult.SUCCESS
+ return LogExportResult.FAILURE
+
+
+# pylint: disable=line-too-long
+# Common schema: https://github.com/microsoft/common-schema/blob/main/v4.0/Mappings/AzureMonitor-AI.md#exceptionseveritylevel
+# SeverityNumber specs: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/logs/data-model.md#field-severitynumber
+def _get_severity_level(severity_number: Optional[SeverityNumber]):
+ if severity_number is None or severity_number.value < 9:
+ return 0
+ return int((severity_number.value - 1) / 4 - 1)
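+
+
+# Worked examples of the mapping above (SeverityNumber value -> AI severity level):
+#     1-8   (TRACE/DEBUG) -> 0 (Verbose)
+#     9-12  (INFO)        -> 1 (Information)
+#     13-16 (WARN)        -> 2 (Warning)
+#     17-20 (ERROR)       -> 3 (Error)
+#     21-24 (FATAL)       -> 4 (Critical)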
+
+
+def _map_body_to_message(log_body: Any) -> str:
+ if not log_body:
+ return ""
+
+ if isinstance(log_body, str):
+ return log_body[:32768]
+
+ if isinstance(log_body, Exception):
+ return str(log_body)[:32768]
+
+ try:
+ return json.dumps(log_body)[:32768]
+ except: # pylint: disable=bare-except
+ return str(log_body)[:32768]
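+
+
+# Behavior sketch for _map_body_to_message (every result is truncated to 32768 chars):
+#     ""                  -> ""          (falsy bodies map to the empty string)
+#     "hello"             -> "hello"
+#     {"k": 1}            -> '{"k": 1}'  (JSON-serializable bodies are dumped)
+#     ValueError("boom")  -> "boom"      (exceptions are stringified)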
+
+
+def _is_ignored_attribute(key: str) -> bool:
+ return key in _IGNORED_ATTRS
+
+
+_IGNORED_ATTRS = frozenset(
+ (
+ EXCEPTION_TYPE,
+ EXCEPTION_MESSAGE,
+ EXCEPTION_STACKTRACE,
+ EXCEPTION_ESCAPED,
+ _APPLICATION_INSIGHTS_EVENT_MARKER_ATTRIBUTE,
+ _MICROSOFT_CUSTOM_EVENT_NAME,
+ )
+)
+
+
+def _set_statsbeat_custom_events_feature():
+ if is_statsbeat_enabled() and not get_statsbeat_shutdown() and not get_statsbeat_custom_events_feature_set():
+ set_statsbeat_custom_events_feature_set()
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/metrics/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/metrics/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/metrics/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/metrics/_exporter.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/metrics/_exporter.py
new file mode 100644
index 00000000..98ed6a47
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/metrics/_exporter.py
@@ -0,0 +1,291 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import logging
+import os
+
+from typing import Dict, Optional, Union, Any
+
+from opentelemetry.util.types import Attributes
+from opentelemetry.sdk.metrics import (
+ Counter,
+ Histogram,
+ ObservableCounter,
+ ObservableGauge,
+ ObservableUpDownCounter,
+ UpDownCounter,
+)
+from opentelemetry.sdk.metrics.export import (
+ AggregationTemporality,
+ DataPointT,
+ HistogramDataPoint,
+ MetricExporter,
+ MetricExportResult,
+ MetricsData as OTMetricsData,
+ NumberDataPoint,
+)
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.util.instrumentation import InstrumentationScope
+from opentelemetry.semconv.attributes.http_attributes import HTTP_RESPONSE_STATUS_CODE
+from opentelemetry.semconv.metrics import MetricInstruments
+from opentelemetry.semconv.metrics.http_metrics import (
+ HTTP_CLIENT_REQUEST_DURATION,
+ HTTP_SERVER_REQUEST_DURATION,
+)
+from opentelemetry.semconv.trace import SpanAttributes
+
+from azure.monitor.opentelemetry.exporter._constants import (
+ _APPLICATIONINSIGHTS_METRIC_NAMESPACE_OPT_IN,
+ _AUTOCOLLECTED_INSTRUMENT_NAMES,
+ _METRIC_ENVELOPE_NAME,
+)
+from azure.monitor.opentelemetry.exporter import _utils
+from azure.monitor.opentelemetry.exporter._generated.models import (
+ MetricDataPoint,
+ MetricsData,
+ MonitorBase,
+ TelemetryItem,
+)
+from azure.monitor.opentelemetry.exporter.export._base import (
+ BaseExporter,
+ ExportResult,
+)
+from azure.monitor.opentelemetry.exporter.export.trace import _utils as trace_utils
+
+
+_logger = logging.getLogger(__name__)
+
+__all__ = ["AzureMonitorMetricExporter"]
+
+
+APPLICATION_INSIGHTS_METRIC_TEMPORALITIES = {
+ Counter: AggregationTemporality.DELTA,
+ Histogram: AggregationTemporality.DELTA,
+ ObservableCounter: AggregationTemporality.DELTA,
+ ObservableGauge: AggregationTemporality.CUMULATIVE,
+ ObservableUpDownCounter: AggregationTemporality.CUMULATIVE,
+ UpDownCounter: AggregationTemporality.CUMULATIVE,
+}
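+
+# Rationale (inferred, not stated in this module): monotonic sums and histograms
+# are sent as per-interval deltas, which is what the ingestion service aggregates;
+# gauges and up-down counters describe a current value, so they stay cumulative.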
+
+
+class AzureMonitorMetricExporter(BaseExporter, MetricExporter):
+ """Azure Monitor Metric exporter for OpenTelemetry."""
+
+ def __init__(self, **kwargs: Any) -> None:
+ BaseExporter.__init__(self, **kwargs)
+ MetricExporter.__init__(
+ self,
+ preferred_temporality=APPLICATION_INSIGHTS_METRIC_TEMPORALITIES, # type: ignore
+ preferred_aggregation=kwargs.get("preferred_aggregation"), # type: ignore
+ )
+
+ # pylint: disable=R1702
+ def export(
+ self,
+ metrics_data: OTMetricsData,
+ timeout_millis: float = 10_000,
+ **kwargs: Any,
+ ) -> MetricExportResult:
+        """Exports a batch of metric data.
+
+        :param metrics_data: OpenTelemetry Metric(s) to export.
+        :type metrics_data: ~opentelemetry.sdk.metrics.export.MetricsData
+ :param timeout_millis: The maximum amount of time to wait for each export. Not currently used.
+ :type timeout_millis: float
+ :return: The result of the export.
+ :rtype: ~opentelemetry.sdk.metrics.export.MetricExportResult
+ """
+ envelopes = []
+ if metrics_data is None:
+ return MetricExportResult.SUCCESS
+ for resource_metric in metrics_data.resource_metrics:
+ for scope_metric in resource_metric.scope_metrics:
+ for metric in scope_metric.metrics:
+ for point in metric.data.data_points:
+ if point is not None:
+ envelope = self._point_to_envelope(
+ point,
+ metric.name,
+ resource_metric.resource,
+ scope_metric.scope,
+ )
+ if envelope is not None:
+ envelopes.append(envelope)
+ try:
+ result = self._transmit(envelopes)
+ self._handle_transmit_from_storage(envelopes, result)
+ return _get_metric_export_result(result)
+ except Exception: # pylint: disable=broad-except
+ _logger.exception("Exception occurred while exporting the data.")
+ return _get_metric_export_result(ExportResult.FAILED_NOT_RETRYABLE)
+
+ def force_flush(
+ self,
+ timeout_millis: float = 10_000,
+ ) -> bool:
+        # Ensure that the export of any metrics currently received by the exporter is
+        # completed as soon as possible. Exports here are synchronous, so there is nothing to flush.
+        return True
+
+ def shutdown(
+ self,
+ timeout_millis: float = 30_000,
+ **kwargs: Any,
+ ) -> None:
+ """Shuts down the exporter.
+
+ Called when the SDK is shut down.
+
+ :param timeout_millis: The maximum amount of time to wait for shutdown. Not currently used.
+ :type timeout_millis: float
+ """
+ if self.storage:
+ self.storage.close()
+
+ def _point_to_envelope(
+ self,
+ point: DataPointT,
+ name: str,
+ resource: Optional[Resource] = None,
+ scope: Optional[InstrumentationScope] = None,
+ ) -> Optional[TelemetryItem]:
+ envelope = _convert_point_to_envelope(point, name, resource, scope)
+ if name in _AUTOCOLLECTED_INSTRUMENT_NAMES:
+ envelope = _handle_std_metric_envelope(envelope, name, point.attributes) # type: ignore
+ if envelope is not None:
+ envelope.instrumentation_key = self._instrumentation_key
+ return envelope
+
+ # pylint: disable=docstring-keyword-should-match-keyword-only
+ @classmethod
+ def from_connection_string(cls, conn_str: str, **kwargs: Any) -> "AzureMonitorMetricExporter":
+ """
+ Create an AzureMonitorMetricExporter from a connection string. This is
+ the recommended way of instantiation if a connection string is passed in
+ explicitly. If a user wants to use a connection string provided by
+ environment variable, the constructor of the exporter can be called
+ directly.
+
+ :param str conn_str: The connection string to be used for
+ authentication.
+ :keyword str api_version: The service API version used. Defaults to
+ latest.
+ :return: An instance of ~AzureMonitorMetricExporter
+ :rtype: ~azure.monitor.opentelemetry.exporter.AzureMonitorMetricExporter
+ """
+ return cls(connection_string=conn_str, **kwargs)
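+
+    # Usage sketch (placeholder connection string; PeriodicExportingMetricReader is
+    # from opentelemetry.sdk.metrics.export):
+    #
+    #     exporter = AzureMonitorMetricExporter.from_connection_string(
+    #         "InstrumentationKey=00000000-0000-0000-0000-000000000000"
+    #     )
+    #     reader = PeriodicExportingMetricReader(exporter)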
+
+
+# pylint: disable=protected-access
+def _convert_point_to_envelope(
+ point: DataPointT, name: str, resource: Optional[Resource] = None, scope: Optional[InstrumentationScope] = None
+) -> TelemetryItem:
+ envelope = _utils._create_telemetry_item(point.time_unix_nano)
+ envelope.name = _METRIC_ENVELOPE_NAME
+ envelope.tags.update(_utils._populate_part_a_fields(resource)) # type: ignore
+ namespace = None
+ if scope is not None and _is_metric_namespace_opted_in():
+ namespace = str(scope.name)[:256]
+ value: Union[int, float] = 0
+ count = 1
+ min_ = None
+ max_ = None
+ # std_dev = None
+
+ if isinstance(point, NumberDataPoint):
+ value = point.value
+ elif isinstance(point, HistogramDataPoint):
+ value = point.sum
+ count = int(point.count)
+ min_ = point.min
+ max_ = point.max
+
+ # truncation logic
+ properties = _utils._filter_custom_properties(point.attributes)
+
+ data_point = MetricDataPoint(
+ name=str(name)[:1024],
+ namespace=namespace,
+ value=value,
+ count=count,
+ min=min_,
+ max=max_,
+ )
+
+ data = MetricsData(
+ properties=properties,
+ metrics=[data_point],
+ )
+
+ envelope.data = MonitorBase(base_data=data, base_type="MetricData")
+
+ return envelope
+
+
+def _handle_std_metric_envelope(
+ envelope: TelemetryItem,
+ name: str,
+ attributes: Attributes,
+) -> Optional[TelemetryItem]:
+ properties: Dict[str, str] = {}
+ tags = envelope.tags
+ if not attributes:
+ attributes = {}
+ status_code = attributes.get(HTTP_RESPONSE_STATUS_CODE) or attributes.get(SpanAttributes.HTTP_STATUS_CODE)
+ if status_code:
+ try:
+ status_code = int(status_code) # type: ignore
+ except ValueError:
+ status_code = 0
+ else:
+ status_code = 0
+ if name in (HTTP_CLIENT_REQUEST_DURATION, MetricInstruments.HTTP_CLIENT_DURATION):
+ properties["_MS.MetricId"] = "dependencies/duration"
+ properties["_MS.IsAutocollected"] = "True"
+ properties["Dependency.Type"] = "http"
+ properties["Dependency.Success"] = str(_is_status_code_success(status_code)) # type: ignore
+ target, _ = trace_utils._get_target_and_path_for_http_dependency(attributes)
+ properties["dependency/target"] = target # type: ignore
+ properties["dependency/resultCode"] = str(status_code)
+ properties["cloud/roleInstance"] = tags["ai.cloud.roleInstance"] # type: ignore
+ properties["cloud/roleName"] = tags["ai.cloud.role"] # type: ignore
+ elif name in (HTTP_SERVER_REQUEST_DURATION, MetricInstruments.HTTP_SERVER_DURATION):
+ properties["_MS.MetricId"] = "requests/duration"
+ properties["_MS.IsAutocollected"] = "True"
+ properties["request/resultCode"] = str(status_code)
+ # TODO: Change to symbol once released in upstream
+ if attributes.get("user_agent.synthetic.type"):
+ properties["operation/synthetic"] = "True"
+ properties["cloud/roleInstance"] = tags["ai.cloud.roleInstance"] # type: ignore
+ properties["cloud/roleName"] = tags["ai.cloud.role"] # type: ignore
+ properties["Request.Success"] = str(_is_status_code_success(status_code)) # type: ignore
+ else:
+ # Any other autocollected metrics are not supported yet for standard metrics
+ # We ignore these envelopes in these cases
+ return None
+
+ # TODO: rpc, database, messaging
+
+ envelope.data.base_data.properties = properties # type: ignore
+
+ return envelope
+
+
+def _is_status_code_success(status_code: Optional[Union[int, str]]) -> bool:
+ if status_code is None or status_code == 0:
+ return False
+ try:
+        # Success criteria based solely on status code: True only if status_code < 400,
+        # for both client and server spans
+ return int(status_code) < 400
+ except ValueError:
+ return False
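+
+
+# Examples: _is_status_code_success(200) -> True; 399 -> True; 404 -> False;
+# 0, None, and non-numeric strings -> False.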
+
+
+def _is_metric_namespace_opted_in() -> bool:
+ return os.environ.get(_APPLICATIONINSIGHTS_METRIC_NAMESPACE_OPT_IN, "False").lower() == "true"
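+
+
+# Opt-in is environment-driven; the variable name is assumed to match the
+# constant's value, e.g.:
+#     export APPLICATIONINSIGHTS_METRIC_NAMESPACE_OPT_IN=true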
+
+
+def _get_metric_export_result(result: ExportResult) -> MetricExportResult:
+ if result == ExportResult.SUCCESS:
+ return MetricExportResult.SUCCESS
+ return MetricExportResult.FAILURE
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_exporter.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_exporter.py
new file mode 100644
index 00000000..c1d51b7d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_exporter.py
@@ -0,0 +1,553 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from os import environ
+import json
+import logging
+from time import time_ns
+from typing import no_type_check, Any, Dict, List, Sequence
+from urllib.parse import urlparse
+
+from opentelemetry.semconv.attributes.client_attributes import CLIENT_ADDRESS
+from opentelemetry.semconv.attributes.http_attributes import (
+ HTTP_REQUEST_METHOD,
+ HTTP_RESPONSE_STATUS_CODE,
+)
+from opentelemetry.semconv.trace import DbSystemValues, SpanAttributes
+from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
+from opentelemetry.trace import SpanKind, get_tracer_provider
+
+from azure.monitor.opentelemetry.exporter._constants import (
+ _APPLICATIONINSIGHTS_OPENTELEMETRY_RESOURCE_METRIC_DISABLED,
+ _AZURE_SDK_NAMESPACE_NAME,
+ _AZURE_SDK_OPENTELEMETRY_NAME,
+ _INSTRUMENTATION_SUPPORTING_METRICS_LIST,
+ _SAMPLE_RATE_KEY,
+ _METRIC_ENVELOPE_NAME,
+ _MESSAGE_ENVELOPE_NAME,
+ _REQUEST_ENVELOPE_NAME,
+ _EXCEPTION_ENVELOPE_NAME,
+ _REMOTE_DEPENDENCY_ENVELOPE_NAME,
+)
+from azure.monitor.opentelemetry.exporter import _utils
+from azure.monitor.opentelemetry.exporter._generated.models import (
+ ContextTagKeys,
+ MessageData,
+ MetricDataPoint,
+ MetricsData,
+ MonitorBase,
+ RemoteDependencyData,
+ RequestData,
+ TelemetryExceptionData,
+ TelemetryExceptionDetails,
+ TelemetryItem,
+)
+from azure.monitor.opentelemetry.exporter.export._base import (
+ BaseExporter,
+ ExportResult,
+)
+from . import _utils as trace_utils
+
+
+_logger = logging.getLogger(__name__)
+
+__all__ = ["AzureMonitorTraceExporter"]
+
+_STANDARD_OPENTELEMETRY_ATTRIBUTE_PREFIXES = [
+ "http.",
+ "db.",
+ "message.",
+ "messaging.",
+ "rpc.",
+ "enduser.",
+ "net.",
+ "peer.",
+ "exception.",
+ "thread.",
+    "faas.",
+ "code.",
+]
+
+_STANDARD_OPENTELEMETRY_HTTP_ATTRIBUTES = [
+ "client.address",
+ "client.port",
+ "server.address",
+ "server.port",
+ "url.full",
+ "url.path",
+ "url.query",
+ "url.scheme",
+ "url.template",
+ "error.type",
+ "network.local.address",
+ "network.local.port",
+ "network.protocol.name",
+ "network.peer.address",
+ "network.peer.port",
+ "network.protocol.version",
+ "network.transport",
+ "user_agent.original",
+ "user_agent.synthetic.type",
+]
+
+_STANDARD_AZURE_MONITOR_ATTRIBUTES = [
+ _SAMPLE_RATE_KEY,
+]
+
+
+class AzureMonitorTraceExporter(BaseExporter, SpanExporter):
+ """Azure Monitor Trace exporter for OpenTelemetry."""
+
+ def __init__(self, **kwargs: Any):
+ self._tracer_provider = kwargs.pop("tracer_provider", None)
+ super().__init__(**kwargs)
+
+ def export(
+ self, spans: Sequence[ReadableSpan], **kwargs: Any # pylint: disable=unused-argument
+ ) -> SpanExportResult:
+ """Export span data.
+
+ :param spans: Open Telemetry Spans to export.
+        :type spans: ~typing.Sequence[~opentelemetry.sdk.trace.ReadableSpan]
+ :return: The result of the export.
+ :rtype: ~opentelemetry.sdk.trace.export.SpanExportResult
+ """
+ envelopes = []
+ if spans and self._should_collect_otel_resource_metric():
+ resource = None
+ try:
+ tracer_provider = self._tracer_provider or get_tracer_provider()
+ resource = tracer_provider.resource # type: ignore
+ envelopes.append(self._get_otel_resource_envelope(resource))
+ except AttributeError as e:
+ _logger.exception("Failed to derive Resource from Tracer Provider: %s", e)
+ for span in spans:
+ envelopes.append(self._span_to_envelope(span))
+ envelopes.extend(self._span_events_to_envelopes(span))
+ try:
+ result = self._transmit(envelopes)
+ self._handle_transmit_from_storage(envelopes, result)
+ return _get_trace_export_result(result)
+ except Exception: # pylint: disable=broad-except
+ _logger.exception("Exception occurred while exporting the data.")
+ return _get_trace_export_result(ExportResult.FAILED_NOT_RETRYABLE)
+
+ def shutdown(self) -> None:
+ """Shuts down the exporter.
+
+ Called when the SDK is shut down.
+ """
+ if self.storage:
+ self.storage.close()
+
+ # pylint: disable=protected-access
+ def _get_otel_resource_envelope(self, resource: Resource) -> TelemetryItem:
+ attributes: Dict[str, str] = {}
+ if resource:
+ attributes = resource.attributes
+ envelope = _utils._create_telemetry_item(time_ns())
+ envelope.name = _METRIC_ENVELOPE_NAME
+ envelope.tags.update(_utils._populate_part_a_fields(resource)) # pylint: disable=W0212
+ envelope.instrumentation_key = self._instrumentation_key
+ data_point = MetricDataPoint(
+ name="_OTELRESOURCE_"[:1024],
+ value=0,
+ )
+
+ data = MetricsData(
+ properties=attributes,
+ metrics=[data_point],
+ )
+
+ envelope.data = MonitorBase(base_data=data, base_type="MetricData")
+
+ return envelope
+
+ def _span_to_envelope(self, span: ReadableSpan) -> TelemetryItem:
+ envelope = _convert_span_to_envelope(span)
+ envelope.instrumentation_key = self._instrumentation_key
+ return envelope # type: ignore
+
+ def _span_events_to_envelopes(self, span: ReadableSpan) -> Sequence[TelemetryItem]:
+ if not span or len(span.events) == 0:
+ return []
+ envelopes = _convert_span_events_to_envelopes(span)
+ for envelope in envelopes:
+ envelope.instrumentation_key = self._instrumentation_key
+ return envelopes
+
+ def _should_collect_otel_resource_metric(self):
+ disabled = environ.get(_APPLICATIONINSIGHTS_OPENTELEMETRY_RESOURCE_METRIC_DISABLED)
+ return disabled is None or disabled.lower() != "true"
+
+ # pylint: disable=docstring-keyword-should-match-keyword-only
+ @classmethod
+ def from_connection_string(cls, conn_str: str, **kwargs: Any) -> "AzureMonitorTraceExporter":
+ """
+ Create an AzureMonitorTraceExporter from a connection string. This is
+ the recommended way of instantiation if a connection string is passed in
+ explicitly. If a user wants to use a connection string provided by
+ environment variable, the constructor of the exporter can be called
+ directly.
+
+ :param str conn_str: The connection string to be used for
+ authentication.
+ :keyword str api_version: The service API version used. Defaults to
+ latest.
+ :return: an instance of ~AzureMonitorTraceExporter
+ :rtype: ~azure.monitor.opentelemetry.exporter.AzureMonitorTraceExporter
+ """
+ return cls(connection_string=conn_str, **kwargs)
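+
+    # Usage sketch (placeholder connection string; BatchSpanProcessor is from
+    # opentelemetry.sdk.trace.export):
+    #
+    #     exporter = AzureMonitorTraceExporter.from_connection_string(
+    #         "InstrumentationKey=00000000-0000-0000-0000-000000000000"
+    #     )
+    #     tracer_provider.add_span_processor(BatchSpanProcessor(exporter))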
+
+
+# pylint: disable=too-many-statements
+# pylint: disable=too-many-branches
+# pylint: disable=protected-access
+# mypy: disable-error-code="assignment,attr-defined,index,operator,union-attr"
+@no_type_check
+def _convert_span_to_envelope(span: ReadableSpan) -> TelemetryItem:
+ # Update instrumentation bitmap if span was generated from instrumentation
+ _check_instrumentation_span(span)
+ duration = 0
+ start_time = 0
+ if span.start_time:
+ start_time = span.start_time
+ if span.end_time:
+ duration = span.end_time - span.start_time
+ envelope = _utils._create_telemetry_item(start_time)
+ envelope.tags.update(_utils._populate_part_a_fields(span.resource))
+ envelope.tags[ContextTagKeys.AI_OPERATION_ID] = "{:032x}".format(span.context.trace_id)
+ if SpanAttributes.ENDUSER_ID in span.attributes:
+ envelope.tags[ContextTagKeys.AI_USER_ID] = span.attributes[SpanAttributes.ENDUSER_ID]
+ if span.parent and span.parent.span_id:
+ envelope.tags[ContextTagKeys.AI_OPERATION_PARENT_ID] = "{:016x}".format(span.parent.span_id)
+ if span.kind in (SpanKind.CONSUMER, SpanKind.SERVER):
+ envelope.name = _REQUEST_ENVELOPE_NAME
+ data = RequestData(
+ name=span.name,
+ id="{:016x}".format(span.context.span_id),
+ duration=_utils.ns_to_duration(duration),
+ response_code="0",
+ success=span.status.is_ok,
+ properties={},
+ measurements={},
+ )
+ envelope.data = MonitorBase(base_data=data, base_type="RequestData")
+ envelope.tags[ContextTagKeys.AI_OPERATION_NAME] = span.name
+ location_ip = trace_utils._get_location_ip(span.attributes)
+ if location_ip:
+ envelope.tags[ContextTagKeys.AI_LOCATION_IP] = location_ip
+ if _AZURE_SDK_NAMESPACE_NAME in span.attributes: # Azure specific resources
+ # Currently only eventhub and servicebus are supported (kind CONSUMER)
+ data.source = trace_utils._get_azure_sdk_target_source(span.attributes)
+ if span.links:
+ total = 0
+ for link in span.links:
+ attributes = link.attributes
+ enqueued_time = attributes.get("enqueuedTime")
+ if isinstance(enqueued_time, int):
+ difference = (start_time / 1000000) - enqueued_time
+ total += difference
+ data.measurements["timeSinceEnqueued"] = max(0, total / len(span.links))
+ elif HTTP_REQUEST_METHOD in span.attributes or SpanAttributes.HTTP_METHOD in span.attributes: # HTTP
+ path = ""
+ user_agent = trace_utils._get_user_agent(span.attributes)
+ if user_agent:
+ # TODO: Not exposed in Swagger, need to update def
+ envelope.tags["ai.user.userAgent"] = user_agent
+ # url
+ url = trace_utils._get_url_for_http_request(span.attributes)
+ data.url = url
+ # Http specific logic for ai.operation.name
+ if SpanAttributes.HTTP_ROUTE in span.attributes:
+ envelope.tags[ContextTagKeys.AI_OPERATION_NAME] = "{} {}".format(
+ span.attributes.get(HTTP_REQUEST_METHOD) or span.attributes.get(SpanAttributes.HTTP_METHOD),
+ span.attributes[SpanAttributes.HTTP_ROUTE],
+ )
+ elif url:
+ try:
+ parse_url = urlparse(url)
+ path = parse_url.path
+ if not path:
+ path = "/"
+ envelope.tags[ContextTagKeys.AI_OPERATION_NAME] = "{} {}".format(
+ span.attributes.get(HTTP_REQUEST_METHOD) or span.attributes.get(SpanAttributes.HTTP_METHOD),
+ path,
+ )
+ except Exception: # pylint: disable=broad-except
+ pass
+ status_code = span.attributes.get(HTTP_RESPONSE_STATUS_CODE) \
+ or span.attributes.get(SpanAttributes.HTTP_STATUS_CODE)
+ if status_code:
+ try:
+ status_code = int(status_code) # type: ignore
+ except ValueError:
+ status_code = 0
+ else:
+ status_code = 0
+ data.response_code = str(status_code)
+ # Success criteria for server spans depends on span.success and the actual status code
+ data.success = span.status.is_ok and status_code and status_code not in range(400, 500)
+ elif SpanAttributes.MESSAGING_SYSTEM in span.attributes: # Messaging
+ if span.attributes.get(SpanAttributes.MESSAGING_DESTINATION):
+ if span.attributes.get(CLIENT_ADDRESS) or span.attributes.get(SpanAttributes.NET_PEER_NAME):
+ data.source = "{}/{}".format(
+ span.attributes.get(CLIENT_ADDRESS) or span.attributes.get(SpanAttributes.NET_PEER_NAME),
+ span.attributes.get(SpanAttributes.MESSAGING_DESTINATION),
+ )
+ elif span.attributes.get(SpanAttributes.NET_PEER_IP):
+ data.source = "{}/{}".format(
+ span.attributes[SpanAttributes.NET_PEER_IP],
+ span.attributes.get(SpanAttributes.MESSAGING_DESTINATION),
+ )
+ else:
+ data.source = span.attributes.get(SpanAttributes.MESSAGING_DESTINATION, "")
+ # Apply truncation
+ # See https://github.com/MohanGsk/ApplicationInsights-Home/tree/master/EndpointSpecs/Schemas/Bond
+ if envelope.tags.get(ContextTagKeys.AI_OPERATION_NAME):
+ data.name = envelope.tags[ContextTagKeys.AI_OPERATION_NAME][:1024]
+ if data.response_code:
+ data.response_code = data.response_code[:1024]
+ if data.source:
+ data.source = data.source[:1024]
+ if data.url:
+ data.url = data.url[:2048]
+ else: # INTERNAL, CLIENT, PRODUCER
+ envelope.name = _REMOTE_DEPENDENCY_ENVELOPE_NAME
+ # TODO: ai.operation.name for non-server spans
+ time = 0
+ if span.end_time and span.start_time:
+ time = span.end_time - span.start_time
+ data = RemoteDependencyData( # type: ignore
+ name=span.name,
+ id="{:016x}".format(span.context.span_id),
+ result_code="0",
+ duration=_utils.ns_to_duration(time),
+ success=span.status.is_ok, # Success depends only on span status
+ properties={},
+ )
+ envelope.data = MonitorBase(base_data=data, base_type="RemoteDependencyData")
+ target = trace_utils._get_target_for_dependency_from_peer(span.attributes)
+ if span.kind is SpanKind.CLIENT:
+ if _AZURE_SDK_NAMESPACE_NAME in span.attributes: # Azure specific resources
+ # Currently only eventhub and servicebus are supported
+ # https://github.com/Azure/azure-sdk-for-python/issues/9256
+ data.type = span.attributes[_AZURE_SDK_NAMESPACE_NAME]
+ data.target = trace_utils._get_azure_sdk_target_source(span.attributes)
+ elif HTTP_REQUEST_METHOD in span.attributes or SpanAttributes.HTTP_METHOD in span.attributes: # HTTP
+ data.type = "HTTP"
+ user_agent = trace_utils._get_user_agent(span.attributes)
+ if user_agent:
+ # TODO: Not exposed in Swagger, need to update def
+ envelope.tags["ai.user.userAgent"] = user_agent
+ url = trace_utils._get_url_for_http_dependency(span.attributes)
+ # data
+ if url:
+ data.data = url
+ target, path = trace_utils._get_target_and_path_for_http_dependency(
+ span.attributes,
+ url,
+ )
+ # http specific logic for name
+ if path:
+ data.name = "{} {}".format(
+ span.attributes.get(HTTP_REQUEST_METHOD) or \
+ span.attributes.get(SpanAttributes.HTTP_METHOD),
+ path,
+ )
+ status_code = span.attributes.get(HTTP_RESPONSE_STATUS_CODE) or \
+ span.attributes.get(SpanAttributes.HTTP_STATUS_CODE)
+ if status_code:
+ try:
+ status_code = int(status_code) # type: ignore
+ except ValueError:
+ status_code = 0
+ else:
+ status_code = 0
+ data.result_code = str(status_code)
+ elif SpanAttributes.DB_SYSTEM in span.attributes: # Database
+ db_system = span.attributes[SpanAttributes.DB_SYSTEM]
+ if db_system == DbSystemValues.MYSQL.value:
+ data.type = "mysql"
+ elif db_system == DbSystemValues.POSTGRESQL.value:
+ data.type = "postgresql"
+ elif db_system == DbSystemValues.MONGODB.value:
+ data.type = "mongodb"
+ elif db_system == DbSystemValues.REDIS.value:
+ data.type = "redis"
+ elif trace_utils._is_sql_db(str(db_system)):
+ data.type = "SQL"
+ else:
+ data.type = db_system
+ # data is the full statement or operation
+ if SpanAttributes.DB_STATEMENT in span.attributes:
+ data.data = span.attributes[SpanAttributes.DB_STATEMENT]
+ elif SpanAttributes.DB_OPERATION in span.attributes:
+ data.data = span.attributes[SpanAttributes.DB_OPERATION]
+ # db specific logic for target
+ target = trace_utils._get_target_for_db_dependency(
+ target, # type: ignore
+ db_system, # type: ignore
+ span.attributes,
+ )
+ elif SpanAttributes.MESSAGING_SYSTEM in span.attributes: # Messaging
+ data.type = span.attributes[SpanAttributes.MESSAGING_SYSTEM]
+ target = trace_utils._get_target_for_messaging_dependency(
+ target, # type: ignore
+ span.attributes,
+ )
+ elif SpanAttributes.RPC_SYSTEM in span.attributes: # Rpc
+ data.type = SpanAttributes.RPC_SYSTEM
+ target = trace_utils._get_target_for_rpc_dependency(
+ target, # type: ignore
+ span.attributes,
+ )
+ elif gen_ai_attributes.GEN_AI_SYSTEM in span.attributes: # GenAI
+ data.type = span.attributes[gen_ai_attributes.GEN_AI_SYSTEM]
+ else:
+ data.type = "N/A"
+ elif span.kind is SpanKind.PRODUCER: # Messaging
+ # Currently only eventhub and servicebus are supported that produce PRODUCER spans
+ if _AZURE_SDK_NAMESPACE_NAME in span.attributes:
+ data.type = "Queue Message | {}".format(span.attributes[_AZURE_SDK_NAMESPACE_NAME])
+ target = trace_utils._get_azure_sdk_target_source(span.attributes)
+ else:
+ data.type = "Queue Message"
+ msg_system = span.attributes.get(SpanAttributes.MESSAGING_SYSTEM)
+ if msg_system:
+ data.type += " | {}".format(msg_system)
+ target = trace_utils._get_target_for_messaging_dependency(
+ target, # type: ignore
+ span.attributes,
+ )
+ else: # SpanKind.INTERNAL
+ data.type = "InProc"
+ if _AZURE_SDK_NAMESPACE_NAME in span.attributes:
+ data.type += " | {}".format(span.attributes[_AZURE_SDK_NAMESPACE_NAME])
+ # Apply truncation
+ # See https://github.com/MohanGsk/ApplicationInsights-Home/tree/master/EndpointSpecs/Schemas/Bond
+ if data.name:
+ data.name = str(data.name)[:1024]
+ if data.result_code:
+ data.result_code = str(data.result_code)[:1024]
+ if data.data:
+ data.data = str(data.data)[:8192]
+ if data.type:
+ data.type = str(data.type)[:1024]
+ if target:
+ data.target = str(target)[:1024]
+
+ # sampleRate
+ if _SAMPLE_RATE_KEY in span.attributes:
+ envelope.sample_rate = span.attributes[_SAMPLE_RATE_KEY]
+
+ data.properties = _utils._filter_custom_properties(
+ span.attributes, lambda key, val: not _is_standard_attribute(key)
+ )
+
+ # Standard metrics special properties
+ # Only add the property if span was generated from instrumentation that supports metrics collection
+ if (
+ span.instrumentation_scope is not None
+ and span.instrumentation_scope.name in _INSTRUMENTATION_SUPPORTING_METRICS_LIST
+ ):
+ data.properties["_MS.ProcessedByMetricExtractors"] = "True"
+
+ if span.links:
+ # Max length for value is 8192
+        # Since each link is a fixed length (80) in JSON, at most 102 links fit in the value
+ links: List[Dict[str, str]] = []
+ for link in span.links:
+            if len(links) >= 102:
+ break
+ operation_id = "{:032x}".format(link.context.trace_id)
+ span_id = "{:016x}".format(link.context.span_id)
+ links.append({"operation_Id": operation_id, "id": span_id})
+ data.properties["_MS.links"] = json.dumps(links)
+ return envelope
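+
+
+# Span kind -> envelope mapping implemented above (summary):
+#     SERVER, CONSUMER           -> RequestData ("Request" telemetry)
+#     CLIENT, PRODUCER, INTERNAL -> RemoteDependencyData ("RemoteDependency" telemetry)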
+
+
+# pylint: disable=protected-access
+def _convert_span_events_to_envelopes(span: ReadableSpan) -> Sequence[TelemetryItem]:
+ envelopes = []
+ for event in span.events:
+ envelope = _utils._create_telemetry_item(event.timestamp)
+ envelope.tags.update(_utils._populate_part_a_fields(span.resource))
+ envelope.tags[ContextTagKeys.AI_OPERATION_ID] = "{:032x}".format(span.context.trace_id)
+ if span.context and span.context.span_id:
+ envelope.tags[ContextTagKeys.AI_OPERATION_PARENT_ID] = "{:016x}".format(span.context.span_id)
+
+ # sampleRate
+ if span.attributes and _SAMPLE_RATE_KEY in span.attributes:
+ envelope.sample_rate = span.attributes[_SAMPLE_RATE_KEY]
+
+ properties = _utils._filter_custom_properties(
+ event.attributes, lambda key, val: not _is_standard_attribute(key)
+ )
+ if event.name == "exception":
+ envelope.name = _EXCEPTION_ENVELOPE_NAME
+ exc_type = exc_message = stack_trace = None
+ if event.attributes:
+ exc_type = event.attributes.get(SpanAttributes.EXCEPTION_TYPE)
+ exc_message = event.attributes.get(SpanAttributes.EXCEPTION_MESSAGE)
+ stack_trace = event.attributes.get(SpanAttributes.EXCEPTION_STACKTRACE)
+ if not exc_type:
+ exc_type = "Exception"
+ if not exc_message:
+ exc_message = "Exception"
+ has_full_stack = stack_trace is not None
+ exc_details = TelemetryExceptionDetails(
+ type_name=str(exc_type)[:1024],
+ message=str(exc_message)[:32768],
+ has_full_stack=has_full_stack,
+ stack=str(stack_trace)[:32768],
+ )
+ data = TelemetryExceptionData(
+ properties=properties,
+ exceptions=[exc_details],
+ )
+ envelope.data = MonitorBase(base_data=data, base_type="ExceptionData")
+ else:
+ envelope.name = _MESSAGE_ENVELOPE_NAME
+ data = MessageData( # type: ignore
+ message=str(event.name)[:32768],
+ properties=properties,
+ )
+ envelope.data = MonitorBase(base_data=data, base_type="MessageData")
+
+ envelopes.append(envelope)
+
+ return envelopes
+
+
+def _check_instrumentation_span(span: ReadableSpan) -> None:
+ # Special use-case for spans generated from azure-sdk services
+ # Identified by having az.namespace as a span attribute
+ if span.attributes and _AZURE_SDK_NAMESPACE_NAME in span.attributes:
+ _utils.add_instrumentation(_AZURE_SDK_OPENTELEMETRY_NAME)
+ return
+ if span.instrumentation_scope is None:
+ return
+ # All instrumentation scope names from OpenTelemetry instrumentations have
+ # `opentelemetry.instrumentation.` as a prefix
+ if span.instrumentation_scope.name.startswith("opentelemetry.instrumentation."):
+ # The string after the prefix is the name of the instrumentation
+ name = span.instrumentation_scope.name.split("opentelemetry.instrumentation.", 1)[1]
+ # Update the bit map to indicate instrumentation is being used
+ _utils.add_instrumentation(name)
+
+
+def _is_standard_attribute(key: str) -> bool:
+ for prefix in _STANDARD_OPENTELEMETRY_ATTRIBUTE_PREFIXES:
+ if key.startswith(prefix):
+ return True
+ return key in _STANDARD_AZURE_MONITOR_ATTRIBUTES or \
+ key in _STANDARD_OPENTELEMETRY_HTTP_ATTRIBUTES
+
+
+def _get_trace_export_result(result: ExportResult) -> SpanExportResult:
+ if result == ExportResult.SUCCESS:
+ return SpanExportResult.SUCCESS
+ return SpanExportResult.FAILURE
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_sampling.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_sampling.py
new file mode 100644
index 00000000..0b41e28e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_sampling.py
@@ -0,0 +1,98 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from typing import Optional, Sequence
+
+# pylint:disable=no-name-in-module
+from fixedint import Int32
+
+from opentelemetry.context import Context
+from opentelemetry.trace import Link, SpanKind, format_trace_id
+from opentelemetry.sdk.trace.sampling import (
+ Decision,
+ Sampler,
+ SamplingResult,
+ _get_parent_trace_state,
+)
+from opentelemetry.trace.span import TraceState
+from opentelemetry.util.types import Attributes
+
+from azure.monitor.opentelemetry.exporter._constants import _SAMPLE_RATE_KEY
+
+
+_HASH = 5381
+_INTEGER_MAX: int = Int32.maxval
+_INTEGER_MIN: int = Int32.minval
+
+
+# The sampler is responsible for the following:
+# Implements the same trace ID hashing algorithm as the Application Insights SDKs,
+# so that traces are sampled consistently across multiple nodes
+# Adds the sample rate as a span attribute when the span is sampled (needed by the ingestion service)
+# Inherits from the Sampler interface as defined by OpenTelemetry
+# https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/sdk.md#sampler
+class ApplicationInsightsSampler(Sampler):
+ """Sampler that implements the same probability sampling algorithm as the ApplicationInsights SDKs."""
+
+ # sampling_ratio must take a value in the range [0,1]
+ def __init__(self, sampling_ratio: float = 1.0):
+ if not 0.0 <= sampling_ratio <= 1.0:
+ raise ValueError("sampling_ratio must be in the range [0,1]")
+ self._ratio = sampling_ratio
+ self._sample_rate = sampling_ratio * 100
+
+ # pylint:disable=C0301
+ # See https://github.com/microsoft/Telemetry-Collection-Spec/blob/main/OpenTelemetry/trace/ApplicationInsightsSampler.md
+ def should_sample(
+ self,
+ parent_context: Optional[Context],
+ trace_id: int,
+ name: str,
+ kind: Optional[SpanKind] = None,
+ attributes: Attributes = None,
+ links: Optional[Sequence["Link"]] = None,
+ trace_state: Optional["TraceState"] = None,
+ ) -> "SamplingResult":
+ if self._sample_rate == 0:
+ decision = Decision.DROP
+ elif self._sample_rate == 100.0:
+ decision = Decision.RECORD_AND_SAMPLE
+ else:
+ # Determine if should sample from ratio and traceId
+ sample_score = self._get_DJB2_sample_score(format_trace_id(trace_id).lower())
+ if sample_score < self._ratio:
+ decision = Decision.RECORD_AND_SAMPLE
+ else:
+ decision = Decision.DROP
+ # Add sample rate as span attribute
+ if attributes is None:
+ attributes = {}
+ attributes[_SAMPLE_RATE_KEY] = self._sample_rate # type: ignore
+ return SamplingResult(
+ decision,
+ attributes,
+ _get_parent_trace_state(parent_context), # type: ignore
+ )
+
+ def _get_DJB2_sample_score(self, trace_id_hex: str) -> float:
+ # This algorithm uses 32bit integers
+ hash_value = Int32(_HASH)
+ for char in trace_id_hex:
+ hash_value = ((hash_value << 5) + hash_value) + ord(char)
+
+ if hash_value == _INTEGER_MIN:
+ hash_value = int(_INTEGER_MAX)
+ else:
+ hash_value = abs(hash_value)
+
+ # divide by _INTEGER_MAX for value between 0 and 1 for sampling score
+ return float(hash_value) / _INTEGER_MAX
+
+ def get_description(self) -> str:
+ return "ApplicationInsightsSampler{}".format(self._ratio)
+
+
+def azure_monitor_opentelemetry_sampler_factory(sampler_argument): # pylint: disable=name-too-long
+ try:
+ rate = float(sampler_argument)
+ return ApplicationInsightsSampler(rate)
+ except (ValueError, TypeError):
+ return ApplicationInsightsSampler()
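+
+
+# Usage sketch (TracerProvider is from opentelemetry.sdk.trace):
+#
+#     provider = TracerProvider(sampler=ApplicationInsightsSampler(0.1))  # keep ~10% of traces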
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_utils.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_utils.py
new file mode 100644
index 00000000..de012cfd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/export/trace/_utils.py
@@ -0,0 +1,321 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import no_type_check, Optional, Tuple
+from urllib.parse import urlparse
+
+from opentelemetry.semconv.attributes import (
+ client_attributes,
+ server_attributes,
+ url_attributes,
+ user_agent_attributes,
+)
+from opentelemetry.semconv.trace import DbSystemValues, SpanAttributes
+from opentelemetry.util.types import Attributes
+
+
+# pylint:disable=too-many-return-statements
+def _get_default_port_db(db_system: str) -> int:
+ if db_system == DbSystemValues.POSTGRESQL.value:
+ return 5432
+ if db_system == DbSystemValues.CASSANDRA.value:
+ return 9042
+ if db_system in (DbSystemValues.MARIADB.value, DbSystemValues.MYSQL.value):
+ return 3306
+ if db_system == DbSystemValues.MSSQL.value:
+ return 1433
+ # TODO: Add in memcached
+ if db_system == "memcached":
+ return 11211
+ if db_system == DbSystemValues.DB2.value:
+ return 50000
+ if db_system == DbSystemValues.ORACLE.value:
+ return 1521
+ if db_system == DbSystemValues.H2.value:
+ return 8082
+ if db_system == DbSystemValues.DERBY.value:
+ return 1527
+ if db_system == DbSystemValues.REDIS.value:
+ return 6379
+ return 0
+
+
+def _get_default_port_http(attributes: Attributes) -> int:
+ scheme = _get_http_scheme(attributes)
+ if scheme == "http":
+ return 80
+ if scheme == "https":
+ return 443
+ return 0
+
+
+def _is_sql_db(db_system: str) -> bool:
+ return db_system in (
+ DbSystemValues.DB2.value,
+ DbSystemValues.DERBY.value,
+ DbSystemValues.MARIADB.value,
+ DbSystemValues.MSSQL.value,
+ DbSystemValues.ORACLE.value,
+ DbSystemValues.SQLITE.value,
+ DbSystemValues.OTHER_SQL.value,
+ # spell-checker:ignore HSQLDB
+ DbSystemValues.HSQLDB.value,
+ DbSystemValues.H2.value,
+ )
+
+
+def _get_azure_sdk_target_source(attributes: Attributes) -> Optional[str]:
+ # Currently logic only works for ServiceBus and EventHub
+ if attributes:
+ # New semconv attributes: https://github.com/Azure/azure-sdk-for-python/pull/29203
+ # TODO: Keep track of when azure-sdk supports stable semconv for these fields
+ peer_address = attributes.get("net.peer.name") or attributes.get("peer.address")
+ destination = attributes.get("messaging.destination.name") or attributes.get("message_bus.destination")
+ if peer_address and destination:
+ return str(peer_address) + "/" + str(destination)
+ return None
+
+
+def _get_http_scheme(attributes: Attributes) -> Optional[str]:
+ if attributes:
+ scheme = attributes.get(url_attributes.URL_SCHEME) or \
+ attributes.get(SpanAttributes.HTTP_SCHEME)
+ if scheme:
+ return str(scheme)
+ return None
+
+
+# Dependency
+
+
+@no_type_check
+def _get_url_for_http_dependency(attributes: Attributes) -> Optional[str]:
+ url = ""
+ if attributes:
+ # Stable sem conv only supports populating url from `url.full`
+ if url_attributes.URL_FULL in attributes:
+ return attributes[url_attributes.URL_FULL]
+ if SpanAttributes.HTTP_URL in attributes:
+ return attributes[SpanAttributes.HTTP_URL]
+ # Scheme
+ scheme = _get_http_scheme(attributes)
+ if scheme and SpanAttributes.HTTP_TARGET in attributes:
+ http_target = attributes[SpanAttributes.HTTP_TARGET]
+ if SpanAttributes.HTTP_HOST in attributes:
+ url = "{}://{}{}".format(
+ str(scheme),
+ attributes[SpanAttributes.HTTP_HOST],
+ http_target,
+ )
+ elif SpanAttributes.NET_PEER_PORT in attributes:
+ peer_port = attributes[SpanAttributes.NET_PEER_PORT]
+ if SpanAttributes.NET_PEER_NAME in attributes:
+ peer_name = attributes[SpanAttributes.NET_PEER_NAME]
+ url = "{}://{}:{}{}".format(
+ scheme,
+ peer_name,
+ peer_port,
+ http_target,
+ )
+ elif SpanAttributes.NET_PEER_IP in attributes:
+ peer_ip = attributes[SpanAttributes.NET_PEER_IP]
+ url = "{}://{}:{}{}".format(
+ scheme,
+ peer_ip,
+ peer_port,
+ http_target,
+ )
+ return url
+
+
+@no_type_check
+def _get_target_for_dependency_from_peer(attributes: Attributes) -> Optional[str]:
+ target = ""
+ if attributes:
+ if SpanAttributes.PEER_SERVICE in attributes:
+ target = attributes[SpanAttributes.PEER_SERVICE]
+ else:
+ if SpanAttributes.NET_PEER_NAME in attributes:
+ target = attributes[SpanAttributes.NET_PEER_NAME]
+ elif SpanAttributes.NET_PEER_IP in attributes:
+ target = attributes[SpanAttributes.NET_PEER_IP]
+ if SpanAttributes.NET_PEER_PORT in attributes:
+ port = attributes[SpanAttributes.NET_PEER_PORT]
+ # TODO: check default port for rpc
+ # This logic assumes default ports never conflict across dependency types
+ if port != _get_default_port_http(attributes) and \
+ port != _get_default_port_db(str(attributes.get(SpanAttributes.DB_SYSTEM))):
+ target = "{}:{}".format(target, port)
+ return target
+
+
+@no_type_check
+def _get_target_and_path_for_http_dependency(
+ attributes: Attributes,
+ url: Optional[str] = "", # Usually populated by _get_url_for_http_dependency()
+) -> Tuple[Optional[str], str]:
+ parsed_url = None
+ target = ""
+ path = "/"
+ default_port = _get_default_port_http(attributes)
+ # Find path from url
+ if not url:
+ url = _get_url_for_http_dependency(attributes)
+ try:
+ parsed_url = urlparse(url)
+ if parsed_url.path:
+ path = parsed_url.path
+ except Exception: # pylint: disable=broad-except
+ pass
+ # Derive target
+ if attributes:
+ # Target from server.*
+ if server_attributes.SERVER_ADDRESS in attributes:
+ target = attributes[server_attributes.SERVER_ADDRESS]
+ server_port = attributes.get(server_attributes.SERVER_PORT)
+ # if not default port, include port in target
+ if server_port != default_port:
+ target = "{}:{}".format(target, server_port)
+ # Target from peer.service
+ elif SpanAttributes.PEER_SERVICE in attributes:
+ target = attributes[SpanAttributes.PEER_SERVICE]
+ # Target from http.host
+ elif SpanAttributes.HTTP_HOST in attributes:
+ host = attributes[SpanAttributes.HTTP_HOST]
+ try:
+ # urlparse insists on absolute URLs starting with "//"
+ # This logic assumes host does not include a "//"
+ host_name = urlparse("//" + str(host))
+ # Ignore port from target if default port
+ if host_name.port == default_port:
+ target = host_name.hostname
+ else:
+ # Else include the whole host as the target
+ target = str(host)
+ except Exception: # pylint: disable=broad-except
+ pass
+ elif parsed_url:
+ # Target from httpUrl
+ if parsed_url.port and parsed_url.port == default_port:
+ if parsed_url.hostname:
+ target = parsed_url.hostname
+ elif parsed_url.netloc:
+ target = parsed_url.netloc
+ if not target:
+ # Get target from peer.* attributes that are NOT peer.service
+ target = _get_target_for_dependency_from_peer(attributes)
+ return (target, path)
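+# Illustrative result (hypothetical attributes): {"server.address":
+# "api.example.com", "server.port": 8080, "http.url":
+# "http://api.example.com:8080/v1/items"} gives target "api.example.com:8080"
+# (8080 is not the scheme default) and path "/v1/items".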
+
+
+@no_type_check
+def _get_target_for_db_dependency(
+ target: Optional[str],
+ db_system: Optional[str],
+ attributes: Attributes,
+) -> Optional[str]:
+ if attributes:
+ db_name = attributes.get(SpanAttributes.DB_NAME)
+ if db_name:
+ if not target:
+ target = str(db_name)
+ else:
+ target = "{}|{}".format(target, db_name)
+ elif not target:
+ target = db_system
+ return target
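+# e.g. db.name "customers" on an existing target "db-host:3307" gives
+# "db-host:3307|customers"; with no db.name and no prior target, db_system
+# is used as-is.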
+
+
+@no_type_check
+def _get_target_for_messaging_dependency(target: Optional[str], attributes: Attributes) -> Optional[str]:
+ if attributes:
+ if not target:
+ if SpanAttributes.MESSAGING_DESTINATION in attributes:
+ target = str(attributes[SpanAttributes.MESSAGING_DESTINATION])
+ elif SpanAttributes.MESSAGING_SYSTEM in attributes:
+ target = str(attributes[SpanAttributes.MESSAGING_SYSTEM])
+ return target
+
+
+@no_type_check
+def _get_target_for_rpc_dependency(target: Optional[str], attributes: Attributes) -> Optional[str]:
+ if attributes:
+ if not target:
+ if SpanAttributes.RPC_SYSTEM in attributes:
+ target = str(attributes[SpanAttributes.RPC_SYSTEM])
+ return target
+
+
+# Request
+
+@no_type_check
+def _get_location_ip(attributes: Attributes) -> Optional[str]:
+ return attributes.get(client_attributes.CLIENT_ADDRESS) or \
+ attributes.get(SpanAttributes.HTTP_CLIENT_IP) or \
+ attributes.get(SpanAttributes.NET_PEER_IP) # We assume non-http spans don't have http related attributes
+
+
+@no_type_check
+def _get_user_agent(attributes: Attributes) -> Optional[str]:
+ return attributes.get(user_agent_attributes.USER_AGENT_ORIGINAL) or \
+ attributes.get(SpanAttributes.HTTP_USER_AGENT)
+
+
+@no_type_check
+def _get_url_for_http_request(attributes: Attributes) -> Optional[str]:
+ url = ""
+ if attributes:
+ # Url
+ if url_attributes.URL_FULL in attributes:
+ return attributes[url_attributes.URL_FULL]
+ if SpanAttributes.HTTP_URL in attributes:
+ return attributes[SpanAttributes.HTTP_URL]
+ # Scheme
+ scheme = _get_http_scheme(attributes)
+ # Target
+ http_target = ""
+ if url_attributes.URL_PATH in attributes:
+ http_target = attributes.get(url_attributes.URL_PATH, "")
+ if http_target and url_attributes.URL_QUERY in attributes:
+ http_target = "{}?{}".format(
+ http_target,
+ attributes.get(url_attributes.URL_QUERY, "")
+ )
+ elif SpanAttributes.HTTP_TARGET in attributes:
+ http_target = attributes.get(SpanAttributes.HTTP_TARGET)
+ if scheme and http_target:
+ # Host
+ http_host = ""
+ if server_attributes.SERVER_ADDRESS in attributes:
+ http_host = attributes.get(server_attributes.SERVER_ADDRESS, "")
+ if http_host and server_attributes.SERVER_PORT in attributes:
+ http_host = "{}:{}".format(
+ http_host,
+ attributes.get(server_attributes.SERVER_PORT, "")
+ )
+ elif SpanAttributes.HTTP_HOST in attributes:
+ http_host = attributes.get(SpanAttributes.HTTP_HOST, "")
+ if http_host:
+ url = "{}://{}{}".format(
+ scheme,
+ http_host,
+ http_target,
+ )
+ elif SpanAttributes.HTTP_SERVER_NAME in attributes:
+ server_name = attributes[SpanAttributes.HTTP_SERVER_NAME]
+ host_port = attributes.get(SpanAttributes.NET_HOST_PORT, "")
+ url = "{}://{}:{}{}".format(
+ scheme,
+ server_name,
+ host_port,
+ http_target,
+ )
+ elif SpanAttributes.NET_HOST_NAME in attributes:
+ host_name = attributes[SpanAttributes.NET_HOST_NAME]
+ host_port = attributes.get(SpanAttributes.NET_HOST_PORT, "")
+ url = "{}://{}:{}{}".format(
+ scheme,
+ host_name,
+ host_port,
+ http_target,
+ )
+ return url
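
The request-side helper above applies a strict precedence: the stable `url.full` wins, then the legacy `http.url`, and only then is a URL assembled from scheme, target, and host pieces. The following condensed sketch restates that chain with plain string keys standing in for the imported attribute constants; it assumes `_get_http_scheme` prefers the stable `url.scheme` over the legacy `http.scheme`, and it omits the `http.server_name`/`net.host.name` fallbacks:

def build_request_url(attrs: dict) -> str:
    # Fully-formed URLs take precedence.
    if "url.full" in attrs:
        return attrs["url.full"]
    if "http.url" in attrs:
        return attrs["http.url"]
    scheme = attrs.get("url.scheme") or attrs.get("http.scheme")
    # Target: stable path (+ optional query), else legacy http.target.
    target = attrs.get("url.path", "")
    if target and "url.query" in attrs:
        target = "{}?{}".format(target, attrs["url.query"])
    if not target:
        target = attrs.get("http.target", "")
    # Host: stable server.address (+ optional port), else legacy http.host.
    host = attrs.get("server.address", "")
    if host and "server.port" in attrs:
        host = "{}:{}".format(host, attrs["server.port"])
    if not host:
        host = attrs.get("http.host", "")
    if scheme and target and host:
        return "{}://{}{}".format(scheme, host, target)
    return ""

# build_request_url({"http.scheme": "https", "http.host": "example.com",
#                    "http.target": "/search?q=otel"})
# -> "https://example.com/search?q=otel"
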
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/py.typed b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/py.typed
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/py.typed
@@ -0,0 +1 @@
+
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/__init__.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_exporter.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_exporter.py
new file mode 100644
index 00000000..c476d3ac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_exporter.py
@@ -0,0 +1,29 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from typing import Optional
+from opentelemetry.sdk.metrics.export import DataPointT
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.util.instrumentation import InstrumentationScope
+
+from azure.monitor.opentelemetry.exporter._generated.models import TelemetryItem
+from azure.monitor.opentelemetry.exporter import AzureMonitorMetricExporter
+from azure.monitor.opentelemetry.exporter._constants import _STATSBEAT_METRIC_NAME_MAPPINGS
+
+
+class _StatsBeatExporter(AzureMonitorMetricExporter):
+
+ def _point_to_envelope(
+ self,
+ point: DataPointT,
+ name: str,
+ resource: Optional[Resource] = None,
+ scope: Optional[InstrumentationScope] = None,
+ ) -> Optional[TelemetryItem]:
+        # map the OpenTelemetry metric name to its statsbeat name
+ name = _STATSBEAT_METRIC_NAME_MAPPINGS[name]
+ return super()._point_to_envelope(
+ point,
+ name,
+ resource,
+            None,  # statsbeat telemetry carries no instrumentation scope
+ )
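
The `_StatsBeatExporter` subclass only renames the instrument before delegating to the base class, and it deliberately passes `None` so no instrumentation scope is attached to the envelope. A minimal sketch of the renaming step; the two mapping entries here are hypothetical, since the real `_STATSBEAT_METRIC_NAME_MAPPINGS` table in `_constants.py` is not reproduced in this diff:

# Hypothetical subset of the real mapping in _constants.py.
_NAME_MAPPINGS = {
    "statsbeat_attach": "Attach",
    "statsbeat_feature": "Feature",
}

def to_statsbeat_name(otel_name: str) -> str:
    # A KeyError here would mean a non-statsbeat instrument reached this
    # exporter, which the dedicated statsbeat MeterProvider is assumed
    # to prevent.
    return _NAME_MAPPINGS[otel_name]

assert to_statsbeat_name("statsbeat_attach") == "Attach"
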
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_state.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_state.py
new file mode 100644
index 00000000..e555ba71
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_state.py
@@ -0,0 +1,70 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+import threading
+from typing import Dict, Union
+
+from azure.monitor.opentelemetry.exporter._constants import _APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL
+
+_REQUESTS_MAP: Dict[str, Union[int, Dict[int, int]]] = {}
+_REQUESTS_MAP_LOCK = threading.Lock()
+
+_STATSBEAT_STATE = {
+ "INITIAL_FAILURE_COUNT": 0,
+ "INITIAL_SUCCESS": False,
+ "SHUTDOWN": False,
+ "CUSTOM_EVENTS_FEATURE_SET": False,
+ "LIVE_METRICS_FEATURE_SET": False,
+}
+_STATSBEAT_STATE_LOCK = threading.Lock()
+_STATSBEAT_FAILURE_COUNT_THRESHOLD = 3
+
+
+def is_statsbeat_enabled():
+ disabled = os.environ.get(_APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL)
+ return disabled is None or disabled.lower() != "true"
+
+
+def increment_statsbeat_initial_failure_count(): # pylint: disable=name-too-long
+ with _STATSBEAT_STATE_LOCK:
+ _STATSBEAT_STATE["INITIAL_FAILURE_COUNT"] += 1
+
+
+def increment_and_check_statsbeat_failure_count(): # pylint: disable=name-too-long
+ increment_statsbeat_initial_failure_count()
+ return get_statsbeat_initial_failure_count() >= _STATSBEAT_FAILURE_COUNT_THRESHOLD
+
+
+def get_statsbeat_initial_failure_count():
+ return _STATSBEAT_STATE["INITIAL_FAILURE_COUNT"]
+
+
+def set_statsbeat_initial_success(success):
+ with _STATSBEAT_STATE_LOCK:
+ _STATSBEAT_STATE["INITIAL_SUCCESS"] = success
+
+
+def get_statsbeat_initial_success():
+ return _STATSBEAT_STATE["INITIAL_SUCCESS"]
+
+
+def get_statsbeat_shutdown():
+ return _STATSBEAT_STATE["SHUTDOWN"]
+
+
+def get_statsbeat_custom_events_feature_set():
+ return _STATSBEAT_STATE["CUSTOM_EVENTS_FEATURE_SET"]
+
+
+def set_statsbeat_custom_events_feature_set():
+ with _STATSBEAT_STATE_LOCK:
+ _STATSBEAT_STATE["CUSTOM_EVENTS_FEATURE_SET"] = True
+
+
+def get_statsbeat_live_metrics_feature_set():
+ return _STATSBEAT_STATE["LIVE_METRICS_FEATURE_SET"]
+
+
+def set_statsbeat_live_metrics_feature_set():
+ with _STATSBEAT_STATE_LOCK:
+ _STATSBEAT_STATE["LIVE_METRICS_FEATURE_SET"] = True
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_statsbeat.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_statsbeat.py
new file mode 100644
index 00000000..e6dcee0c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_statsbeat.py
@@ -0,0 +1,77 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import threading
+
+from opentelemetry.sdk.metrics import MeterProvider
+from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
+from opentelemetry.sdk.resources import Resource
+
+from azure.monitor.opentelemetry.exporter.statsbeat._exporter import _StatsBeatExporter
+from azure.monitor.opentelemetry.exporter.statsbeat._statsbeat_metrics import _StatsbeatMetrics
+from azure.monitor.opentelemetry.exporter.statsbeat._state import (
+ _STATSBEAT_STATE,
+ _STATSBEAT_STATE_LOCK,
+)
+from azure.monitor.opentelemetry.exporter.statsbeat._utils import (
+ _get_stats_connection_string,
+ _get_stats_long_export_interval,
+ _get_stats_short_export_interval,
+)
+
+
+_STATSBEAT_METRICS = None
+_STATSBEAT_LOCK = threading.Lock()
+
+
+# pylint: disable=global-statement
+# pylint: disable=protected-access
+def collect_statsbeat_metrics(exporter) -> None:
+ global _STATSBEAT_METRICS
+    # Only start statsbeat if it does not already exist
+ if _STATSBEAT_METRICS is None:
+ with _STATSBEAT_LOCK:
+ statsbeat_exporter = _StatsBeatExporter(
+ connection_string=_get_stats_connection_string(exporter._endpoint),
+ disable_offline_storage=exporter._disable_offline_storage,
+ )
+ reader = PeriodicExportingMetricReader(
+ statsbeat_exporter,
+ export_interval_millis=_get_stats_short_export_interval() * 1000, # 15m by default
+ )
+ mp = MeterProvider(
+ metric_readers=[reader],
+ resource=Resource.get_empty(),
+ )
+            # long_interval_threshold is how many short-interval collections
+            # must pass before a long-interval collection is exported
+ long_interval_threshold = _get_stats_long_export_interval() // _get_stats_short_export_interval()
+ _STATSBEAT_METRICS = _StatsbeatMetrics(
+ mp,
+ exporter._instrumentation_key,
+ exporter._endpoint,
+ exporter._disable_offline_storage,
+ long_interval_threshold,
+ exporter._credential is not None,
+ exporter._distro_version,
+ )
+ # Export some initial stats on program start
+ mp.force_flush()
+            # register the metrics that are collected after the initial export
+ _STATSBEAT_METRICS.init_non_initial_metrics()
+
+
+def shutdown_statsbeat_metrics() -> None:
+ global _STATSBEAT_METRICS
+ shutdown_success = False
+ if _STATSBEAT_METRICS is not None:
+ with _STATSBEAT_LOCK:
+ try:
+ if _STATSBEAT_METRICS._meter_provider is not None:
+ _STATSBEAT_METRICS._meter_provider.shutdown()
+ _STATSBEAT_METRICS = None
+ shutdown_success = True
+ except: # pylint: disable=bare-except
+ pass
+ if shutdown_success:
+ with _STATSBEAT_STATE_LOCK:
+ _STATSBEAT_STATE["SHUTDOWN"] = True
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_statsbeat_metrics.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_statsbeat_metrics.py
new file mode 100644
index 00000000..db84e01b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_statsbeat_metrics.py
@@ -0,0 +1,417 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from enum import Enum
+import json
+import os
+import platform
+import re
+import sys
+import threading
+from typing import Any, Dict, Iterable, List
+
+import requests # pylint: disable=networking-import-outside-azure-core-transport
+
+from opentelemetry.metrics import CallbackOptions, Observation
+from opentelemetry.sdk.metrics import MeterProvider
+
+from azure.monitor.opentelemetry.exporter import VERSION
+from azure.monitor.opentelemetry.exporter._constants import (
+ _ATTACH_METRIC_NAME,
+ _FEATURE_METRIC_NAME,
+ _REQ_DURATION_NAME,
+ _REQ_EXCEPTION_NAME,
+ _REQ_FAILURE_NAME,
+ _REQ_RETRY_NAME,
+ _REQ_SUCCESS_NAME,
+ _REQ_THROTTLE_NAME,
+ _WEBSITE_HOME_STAMPNAME,
+ _WEBSITE_HOSTNAME,
+ _WEBSITE_SITE_NAME,
+ _AKS_ARM_NAMESPACE_ID,
+)
+from azure.monitor.opentelemetry.exporter.statsbeat._state import (
+ _REQUESTS_MAP_LOCK,
+ _REQUESTS_MAP,
+ get_statsbeat_live_metrics_feature_set,
+ get_statsbeat_custom_events_feature_set,
+)
+from azure.monitor.opentelemetry.exporter import _utils
+
+# cSpell:disable
+
+_AIMS_URI = "http://169.254.169.254/metadata/instance/compute"
+_AIMS_API_VERSION = "api-version=2017-12-01"
+_AIMS_FORMAT = "format=json"
+
+_ENDPOINT_TYPES = ["breeze"]
+
+
+class _RP_Names(Enum):
+ APP_SERVICE = "appsvc"
+ FUNCTIONS = "functions"
+ AKS = "aks"
+ VM = "vm"
+ UNKNOWN = "unknown"
+
+
+_HOST_PATTERN = re.compile("^https?://(?:www\\.)?([^/.]+)")
+
+
+class _FEATURE_TYPES:
+ FEATURE = 0
+ INSTRUMENTATION = 1
+
+
+class _StatsbeatFeature:
+ NONE = 0
+ DISK_RETRY = 1
+ AAD = 2
+ CUSTOM_EVENTS_EXTENSION = 4
+ DISTRO = 8
+ LIVE_METRICS = 16
+
+
+class _AttachTypes:
+ MANUAL = "Manual"
+ INTEGRATED = "IntegratedAuto"
+ STANDALONE = "StandaloneAuto"
+
+
+# pylint: disable=R0902
+class _StatsbeatMetrics:
+
+ _COMMON_ATTRIBUTES: Dict[str, Any] = {
+ "rp": _RP_Names.UNKNOWN.value,
+ "attach": _AttachTypes.MANUAL,
+ "cikey": None,
+ "runtimeVersion": platform.python_version(),
+ "os": platform.system(),
+ "language": "python",
+ "version": VERSION,
+ }
+
+ _NETWORK_ATTRIBUTES: Dict[str, Any] = {
+ "endpoint": _ENDPOINT_TYPES[0], # breeze
+ "host": None,
+ }
+
+ _FEATURE_ATTRIBUTES: Dict[str, Any] = {
+ "feature": None, # 64-bit long, bits represent features enabled
+ "type": _FEATURE_TYPES.FEATURE,
+ }
+
+ _INSTRUMENTATION_ATTRIBUTES: Dict[str, Any] = {
+ "feature": 0, # 64-bit long, bits represent instrumentations used
+ "type": _FEATURE_TYPES.INSTRUMENTATION,
+ }
+
+ def __init__(
+ self,
+ meter_provider: MeterProvider,
+ instrumentation_key: str,
+ endpoint: str,
+ disable_offline_storage: bool,
+ long_interval_threshold: int,
+ has_credential: bool,
+ distro_version: str = "",
+ ) -> None:
+ self._ikey = instrumentation_key
+ self._feature = _StatsbeatFeature.NONE
+ if not disable_offline_storage:
+ self._feature |= _StatsbeatFeature.DISK_RETRY
+ if has_credential:
+ self._feature |= _StatsbeatFeature.AAD
+ if distro_version:
+ self._feature |= _StatsbeatFeature.DISTRO
+ if get_statsbeat_custom_events_feature_set():
+ self._feature |= _StatsbeatFeature.CUSTOM_EVENTS_EXTENSION
+ if get_statsbeat_live_metrics_feature_set():
+ self._feature |= _StatsbeatFeature.LIVE_METRICS
+ self._meter_provider = meter_provider
+ self._meter = self._meter_provider.get_meter(__name__)
+ self._long_interval_threshold = long_interval_threshold
+ # Start internal count at the max size for initial statsbeat export
+ self._long_interval_count_map = {
+ _ATTACH_METRIC_NAME[0]: sys.maxsize,
+ _FEATURE_METRIC_NAME[0]: sys.maxsize,
+ }
+ self._long_interval_lock = threading.Lock()
+ _StatsbeatMetrics._COMMON_ATTRIBUTES["cikey"] = instrumentation_key
+ if _utils._is_attach_enabled():
+ _StatsbeatMetrics._COMMON_ATTRIBUTES["attach"] = _AttachTypes.INTEGRATED
+ _StatsbeatMetrics._NETWORK_ATTRIBUTES["host"] = _shorten_host(endpoint)
+ _StatsbeatMetrics._FEATURE_ATTRIBUTES["feature"] = self._feature
+ _StatsbeatMetrics._INSTRUMENTATION_ATTRIBUTES["feature"] = _utils.get_instrumentations()
+
+        self._vm_retry = True  # True while we should keep trying to detect whether we run in a VM
+ self._vm_data: Dict[str, str] = {}
+
+ # Initial metrics - metrics exported on application start
+
+        # Attach metrics - metrics identifying which rp the application is running in
+ self._attach_metric = self._meter.create_observable_gauge(
+ _ATTACH_METRIC_NAME[0],
+ callbacks=[self._get_attach_metric],
+ unit="",
+ description="Statsbeat metric tracking tracking rp information",
+ )
+
+ # Feature metrics - metrics related to features/instrumentations being used
+ self._feature_metric = self._meter.create_observable_gauge(
+ _FEATURE_METRIC_NAME[0],
+ callbacks=[self._get_feature_metric],
+ unit="",
+ description="Statsbeat metric tracking tracking enabled features",
+ )
+
+ # pylint: disable=unused-argument
+ # pylint: disable=protected-access
+ def _get_attach_metric(self, options: CallbackOptions) -> Iterable[Observation]:
+ observations: List[Observation] = []
+ # Check if it is time to observe long interval metrics
+ if not self._meets_long_interval_threshold(_ATTACH_METRIC_NAME[0]):
+ return observations
+ rp = ""
+ rpId = ""
+ os_type = platform.system()
+ # rp, rpId
+ if _utils._is_on_app_service():
+ # Web apps
+ rp = _RP_Names.APP_SERVICE.value
+ rpId = "{}/{}".format(os.environ.get(_WEBSITE_SITE_NAME), os.environ.get(_WEBSITE_HOME_STAMPNAME, ""))
+ elif _utils._is_on_functions():
+ # Function apps
+ rp = _RP_Names.FUNCTIONS.value
+ rpId = os.environ.get(_WEBSITE_HOSTNAME, "")
+ elif _utils._is_on_aks():
+ # AKS
+ rp = _RP_Names.AKS.value
+ rpId = os.environ.get(_AKS_ARM_NAMESPACE_ID, "")
+ elif self._vm_retry and self._get_azure_compute_metadata():
+ # VM
+ rp = _RP_Names.VM.value
+ rpId = "{}/{}".format(self._vm_data.get("vmId", ""), self._vm_data.get("subscriptionId", ""))
+ os_type = self._vm_data.get("osType", "")
+ else:
+ # Not in any rp or VM metadata failed
+ rp = _RP_Names.UNKNOWN.value
+ rpId = _RP_Names.UNKNOWN.value
+
+ _StatsbeatMetrics._COMMON_ATTRIBUTES["rp"] = rp
+ _StatsbeatMetrics._COMMON_ATTRIBUTES["os"] = os_type or platform.system()
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes["rpId"] = rpId
+ observations.append(Observation(1, dict(attributes))) # type: ignore
+ return observations
+
+ def _get_azure_compute_metadata(self) -> bool:
+ try:
+ request_url = "{0}?{1}&{2}".format(_AIMS_URI, _AIMS_API_VERSION, _AIMS_FORMAT)
+ response = requests.get(request_url, headers={"MetaData": "True"}, timeout=0.2)
+ except (requests.exceptions.ConnectionError, requests.Timeout):
+ # Not in VM
+ self._vm_retry = False
+ return False
+ except requests.exceptions.RequestException:
+ self._vm_retry = True # retry
+ return False
+
+ try:
+ text = response.text
+ self._vm_data = json.loads(text)
+ except Exception: # pylint: disable=broad-except
+ # Error in reading response body, retry
+ self._vm_retry = True
+ return False
+
+        # VM metadata can change over time, so keep allowing refreshes
+ self._vm_retry = True
+ return True
+
+ # pylint: disable=unused-argument
+ def _get_feature_metric(self, options: CallbackOptions) -> Iterable[Observation]:
+ observations: List[Observation] = []
+ # Check if it is time to observe long interval metrics
+ if not self._meets_long_interval_threshold(_FEATURE_METRIC_NAME[0]):
+ return observations
+ # Feature metric
+ # Check if any features were enabled during runtime
+ if get_statsbeat_custom_events_feature_set():
+ self._feature |= _StatsbeatFeature.CUSTOM_EVENTS_EXTENSION
+ _StatsbeatMetrics._FEATURE_ATTRIBUTES["feature"] = self._feature
+ if get_statsbeat_live_metrics_feature_set():
+ self._feature |= _StatsbeatFeature.LIVE_METRICS
+ _StatsbeatMetrics._FEATURE_ATTRIBUTES["feature"] = self._feature
+
+ # Don't send observation if no features enabled
+        if self._feature != _StatsbeatFeature.NONE:
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes.update(_StatsbeatMetrics._FEATURE_ATTRIBUTES) # type: ignore
+ observations.append(Observation(1, dict(attributes))) # type: ignore
+
+ # instrumentation metric
+ # Don't send observation if no instrumentations enabled
+ instrumentation_bits = _utils.get_instrumentations()
+ if instrumentation_bits != 0:
+ _StatsbeatMetrics._INSTRUMENTATION_ATTRIBUTES["feature"] = instrumentation_bits
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes.update(_StatsbeatMetrics._INSTRUMENTATION_ATTRIBUTES) # type: ignore
+ observations.append(Observation(1, dict(attributes))) # type: ignore
+
+ return observations
+
+ def _meets_long_interval_threshold(self, name) -> bool:
+ with self._long_interval_lock:
+            # if the long-interval threshold is not met, it is not yet time
+            # to export the long-interval statsbeat metrics
+ count = self._long_interval_count_map.get(name, sys.maxsize)
+ if count < self._long_interval_threshold:
+ return False
+            # reset the count once the long-interval threshold is met
+ self._long_interval_count_map[name] = 0
+ return True
+
+ # pylint: disable=W0201
+ def init_non_initial_metrics(self):
+ # Network metrics - metrics related to request calls to ingestion service
+ self._success_count = self._meter.create_observable_gauge(
+ _REQ_SUCCESS_NAME[0],
+ callbacks=[self._get_success_count],
+ unit="count",
+ description="Statsbeat metric tracking request success count",
+ )
+ self._failure_count = self._meter.create_observable_gauge(
+ _REQ_FAILURE_NAME[0],
+ callbacks=[self._get_failure_count],
+ unit="count",
+ description="Statsbeat metric tracking request failure count",
+ )
+ self._retry_count = self._meter.create_observable_gauge(
+ _REQ_RETRY_NAME[0],
+ callbacks=[self._get_retry_count],
+ unit="count",
+ description="Statsbeat metric tracking request retry count",
+ )
+ self._throttle_count = self._meter.create_observable_gauge(
+ _REQ_THROTTLE_NAME[0],
+ callbacks=[self._get_throttle_count],
+ unit="count",
+ description="Statsbeat metric tracking request throttle count",
+ )
+ self._exception_count = self._meter.create_observable_gauge(
+ _REQ_EXCEPTION_NAME[0],
+ callbacks=[self._get_exception_count],
+ unit="count",
+ description="Statsbeat metric tracking request exception count",
+ )
+ self._average_duration = self._meter.create_observable_gauge(
+ _REQ_DURATION_NAME[0],
+ callbacks=[self._get_average_duration],
+ unit="avg",
+ description="Statsbeat metric tracking average request duration",
+ )
+
+ # pylint: disable=unused-argument
+ def _get_success_count(self, options: CallbackOptions) -> Iterable[Observation]:
+        # _get_success_count is special in that it indicates when a short
+        # interval collection has happened, which is why the long-interval
+        # counts are incremented when it is called
+ with self._long_interval_lock:
+ for name, count in self._long_interval_count_map.items():
+ self._long_interval_count_map[name] = count + 1
+ observations = []
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes.update(_StatsbeatMetrics._NETWORK_ATTRIBUTES)
+ attributes["statusCode"] = 200
+ with _REQUESTS_MAP_LOCK:
+ # only observe if value is not 0
+ count = _REQUESTS_MAP.get(_REQ_SUCCESS_NAME[1], 0) # type: ignore
+ if count != 0:
+ observations.append(Observation(int(count), dict(attributes)))
+ _REQUESTS_MAP[_REQ_SUCCESS_NAME[1]] = 0
+ return observations
+
+ # pylint: disable=unused-argument
+ def _get_failure_count(self, options: CallbackOptions) -> Iterable[Observation]:
+ observations = []
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes.update(_StatsbeatMetrics._NETWORK_ATTRIBUTES)
+ with _REQUESTS_MAP_LOCK:
+ for code, count in _REQUESTS_MAP.get(_REQ_FAILURE_NAME[1], {}).items(): # type: ignore
+ # only observe if value is not 0
+ if count != 0:
+ attributes["statusCode"] = code
+ observations.append(Observation(int(count), dict(attributes)))
+ _REQUESTS_MAP[_REQ_FAILURE_NAME[1]][code] = 0 # type: ignore
+ return observations
+
+ # pylint: disable=unused-argument
+ def _get_average_duration(self, options: CallbackOptions) -> Iterable[Observation]:
+ observations = []
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes.update(_StatsbeatMetrics._NETWORK_ATTRIBUTES)
+ with _REQUESTS_MAP_LOCK:
+ interval_duration = _REQUESTS_MAP.get(_REQ_DURATION_NAME[1], 0)
+ interval_count = _REQUESTS_MAP.get("count", 0)
+ # only observe if value is not 0
+ if interval_duration > 0 and interval_count > 0: # type: ignore
+ result = interval_duration / interval_count # type: ignore
+ observations.append(Observation(result * 1000, dict(attributes)))
+ _REQUESTS_MAP[_REQ_DURATION_NAME[1]] = 0
+ _REQUESTS_MAP["count"] = 0
+ return observations
+
+ # pylint: disable=unused-argument
+ def _get_retry_count(self, options: CallbackOptions) -> Iterable[Observation]:
+ observations = []
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes.update(_StatsbeatMetrics._NETWORK_ATTRIBUTES)
+ with _REQUESTS_MAP_LOCK:
+ for code, count in _REQUESTS_MAP.get(_REQ_RETRY_NAME[1], {}).items(): # type: ignore
+ # only observe if value is not 0
+ if count != 0:
+ attributes["statusCode"] = code
+ observations.append(Observation(int(count), dict(attributes)))
+ _REQUESTS_MAP[_REQ_RETRY_NAME[1]][code] = 0 # type: ignore
+ return observations
+
+ # pylint: disable=unused-argument
+ def _get_throttle_count(self, options: CallbackOptions) -> Iterable[Observation]:
+ observations = []
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes.update(_StatsbeatMetrics._NETWORK_ATTRIBUTES)
+ with _REQUESTS_MAP_LOCK:
+ for code, count in _REQUESTS_MAP.get(_REQ_THROTTLE_NAME[1], {}).items(): # type: ignore
+ # only observe if value is not 0
+ if count != 0:
+ attributes["statusCode"] = code
+ observations.append(Observation(int(count), dict(attributes)))
+ _REQUESTS_MAP[_REQ_THROTTLE_NAME[1]][code] = 0 # type: ignore
+ return observations
+
+ # pylint: disable=unused-argument
+ def _get_exception_count(self, options: CallbackOptions) -> Iterable[Observation]:
+ observations = []
+ attributes = dict(_StatsbeatMetrics._COMMON_ATTRIBUTES)
+ attributes.update(_StatsbeatMetrics._NETWORK_ATTRIBUTES)
+ with _REQUESTS_MAP_LOCK:
+ for code, count in _REQUESTS_MAP.get(_REQ_EXCEPTION_NAME[1], {}).items(): # type: ignore
+ # only observe if value is not 0
+ if count != 0:
+ attributes["exceptionType"] = code
+ observations.append(Observation(int(count), dict(attributes)))
+ _REQUESTS_MAP[_REQ_EXCEPTION_NAME[1]][code] = 0 # type: ignore
+ return observations
+
+
+def _shorten_host(host: str) -> str:
+ if not host:
+ host = ""
+ match = _HOST_PATTERN.match(host)
+ if match:
+ host = match.group(1)
+ return host
+
+
+# cSpell:enable
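
`_shorten_host` keeps only the first DNS label of the ingestion host, which keeps the statsbeat `host` dimension low-cardinality. A self-contained re-run of the same regex with two worked cases:

import re

# Same pattern as _HOST_PATTERN above.
_PATTERN = re.compile("^https?://(?:www\\.)?([^/.]+)")

def shorten_host(host: str) -> str:
    match = _PATTERN.match(host or "")
    return match.group(1) if match else (host or "")

print(shorten_host("https://westeurope-5.in.applicationinsights.azure.com/"))
# -> "westeurope-5"
print(shorten_host("https://www.example.com"))  # -> "example"
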
diff --git a/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_utils.py b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_utils.py
new file mode 100644
index 00000000..d1607c12
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/monitor/opentelemetry/exporter/statsbeat/_utils.py
@@ -0,0 +1,69 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+
+from azure.monitor.opentelemetry.exporter._constants import (
+ _APPLICATIONINSIGHTS_STATS_CONNECTION_STRING_ENV_NAME,
+ _APPLICATIONINSIGHTS_STATS_LONG_EXPORT_INTERVAL_ENV_NAME,
+ _APPLICATIONINSIGHTS_STATS_SHORT_EXPORT_INTERVAL_ENV_NAME,
+ _DEFAULT_NON_EU_STATS_CONNECTION_STRING,
+ _DEFAULT_EU_STATS_CONNECTION_STRING,
+ _DEFAULT_STATS_SHORT_EXPORT_INTERVAL,
+ _DEFAULT_STATS_LONG_EXPORT_INTERVAL,
+ _EU_ENDPOINTS,
+ _REQ_DURATION_NAME,
+ _REQ_SUCCESS_NAME,
+)
+from azure.monitor.opentelemetry.exporter.statsbeat._state import (
+ _REQUESTS_MAP_LOCK,
+ _REQUESTS_MAP,
+)
+
+
+def _get_stats_connection_string(endpoint: str) -> str:
+ cs_env = os.environ.get(_APPLICATIONINSIGHTS_STATS_CONNECTION_STRING_ENV_NAME)
+ if cs_env:
+ return cs_env
+ for endpoint_location in _EU_ENDPOINTS:
+ if endpoint_location in endpoint:
+ # Use statsbeat EU endpoint if user is in EU region
+ return _DEFAULT_EU_STATS_CONNECTION_STRING
+ return _DEFAULT_NON_EU_STATS_CONNECTION_STRING
+
+
+# seconds
+def _get_stats_short_export_interval() -> int:
+ ei_env = os.environ.get(_APPLICATIONINSIGHTS_STATS_SHORT_EXPORT_INTERVAL_ENV_NAME)
+ if ei_env:
+ try:
+ return int(ei_env)
+ except ValueError:
+ return _DEFAULT_STATS_SHORT_EXPORT_INTERVAL
+ return _DEFAULT_STATS_SHORT_EXPORT_INTERVAL
+
+
+# seconds
+def _get_stats_long_export_interval() -> int:
+ ei_env = os.environ.get(_APPLICATIONINSIGHTS_STATS_LONG_EXPORT_INTERVAL_ENV_NAME)
+ if ei_env:
+ try:
+ return int(ei_env)
+ except ValueError:
+ return _DEFAULT_STATS_LONG_EXPORT_INTERVAL
+ return _DEFAULT_STATS_LONG_EXPORT_INTERVAL
+
+
+def _update_requests_map(type_name, value):
+    # value is a count, a duration, a status code, or an exception name
+ with _REQUESTS_MAP_LOCK:
+ # Mapping is {type_name: count/duration}
+ if type_name in (_REQ_SUCCESS_NAME[1], "count", _REQ_DURATION_NAME[1]): # success, count, duration
+ _REQUESTS_MAP[type_name] = _REQUESTS_MAP.get(type_name, 0) + value
+ else: # exception, failure, retry, throttle
+ prev = 0
+            # Mapping is {type_name: {value: count}}
+ if _REQUESTS_MAP.get(type_name):
+ prev = _REQUESTS_MAP.get(type_name).get(value, 0)
+ else:
+ _REQUESTS_MAP[type_name] = {}
+ _REQUESTS_MAP[type_name][value] = prev + 1
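
Depending on the counter, `_REQUESTS_MAP` therefore holds either a flat number (success, count, duration) or a per-key sub-dict (failure, retry, throttle, exception). A standalone sketch of both branches, with shortened key names standing in for the `_REQ_*_NAME` constants:

from typing import Dict, Union

requests_map: Dict[str, Union[int, float, Dict[int, int]]] = {}

def update(type_name, value):
    if type_name in ("success", "count", "duration"):
        # Flat counters accumulate the value itself.
        requests_map[type_name] = requests_map.get(type_name, 0) + value
    else:
        # Keyed counters count occurrences per status code / exception name.
        bucket = requests_map.setdefault(type_name, {})
        bucket[value] = bucket.get(value, 0) + 1

update("success", 1)
update("duration", 0.120)
update("failure", 500)
update("failure", 500)
print(requests_map)  # {'success': 1, 'duration': 0.12, 'failure': {500: 2}}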