about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/azure/ai/ml/constants
diff options
context:
space:
mode:
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/constants')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/__init__.py           |   72
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_assets.py            |   15
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_common.py            | 1000
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_component.py         |  150
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_compute.py           |  137
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_deployment.py        |   29
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_endpoint.py          |   93
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_finetuning.py        |   17
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/__init__.py      |   38
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/automl.py        |  116
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/distillation.py  |   16
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/finetuning.py    |   26
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/job.py           |  165
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/pipeline.py      |   64
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/sweep.py         |   22
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_monitoring.py        |  123
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_registry.py          |   40
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/constants/_workspace.py         |   56
18 files changed, 2179 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/__init__.py
new file mode 100644
index 00000000..f26edcb4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/__init__.py
@@ -0,0 +1,72 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+"""This package defines constants used in Azure Machine Learning SDKv2."""
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
+
+from azure.ai.ml._restclient.v2023_10_01.models import ListViewType
+
+from ._assets import IPProtectionLevel
+from ._common import AssetTypes, InputOutputModes, InputTypes, ModelType, Scope, TimeZone, WorkspaceKind
+from ._component import ParallelTaskType
+from ._deployment import BatchDeploymentOutputAction
+from ._job import (
+ DataGenerationTaskType,
+ DataGenerationType,
+ DistributionType,
+ ImageClassificationModelNames,
+ ImageInstanceSegmentationModelNames,
+ ImageObjectDetectionModelNames,
+ ImportSourceType,
+ JobType,
+ NlpLearningRateScheduler,
+ NlpModels,
+ TabularTrainingMode,
+)
+from ._monitoring import (
+ MonitorDatasetContext,
+ MonitorFeatureType,
+ MonitorMetricName,
+ MonitorModelType,
+ MonitorSignalType,
+ MonitorTargetTasks,
+)
+from ._registry import AcrAccountSku, StorageAccountType
+from ._workspace import ManagedServiceIdentityType
+
+# Re-home TabularTrainingMode so introspection (and docs tooling) reports it as
+# defined in this public constants package rather than in its private submodule.
+TabularTrainingMode.__module__ = __name__
+
+# Public API of azure.ai.ml.constants; every name is re-exported from the
+# private submodules (or the REST client) imported above.
+__all__ = [
+    "DataGenerationTaskType",
+    "DataGenerationType",
+    "ImportSourceType",
+    "JobType",
+    "ParallelTaskType",
+    "AssetTypes",
+    "InputTypes",
+    "InputOutputModes",
+    "DistributionType",
+    "TimeZone",
+    "BatchDeploymentOutputAction",
+    "ModelType",
+    "ManagedServiceIdentityType",
+    "ImageClassificationModelNames",
+    "ImageObjectDetectionModelNames",
+    "ImageInstanceSegmentationModelNames",
+    "StorageAccountType",
+    "AcrAccountSku",
+    "NlpModels",
+    "NlpLearningRateScheduler",
+    "Scope",
+    "TabularTrainingMode",
+    "MonitorSignalType",
+    "MonitorMetricName",
+    "MonitorModelType",
+    "MonitorFeatureType",
+    "MonitorDatasetContext",
+    "MonitorTargetTasks",
+    "IPProtectionLevel",
+    "ListViewType",
+    "WorkspaceKind",
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_assets.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_assets.py
new file mode 100644
index 00000000..aadfae7e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_assets.py
@@ -0,0 +1,15 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from enum import Enum
+
+from azure.core import CaseInsensitiveEnumMeta
+
+from azure.ai.ml._utils._experimental import experimental
+
+
+@experimental
+class IPProtectionLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Intellectual property protection level."""
+
+    # Values are matched case-insensitively (CaseInsensitiveEnumMeta).
+    ALL = "all"
+    NONE = "none"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_common.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_common.py
new file mode 100644
index 00000000..647b261f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_common.py
@@ -0,0 +1,1000 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from enum import Enum
+
+from azure.core import CaseInsensitiveEnumMeta
+
+AZUREML_CLOUD_ENV_NAME = "AZUREML_CURRENT_CLOUD"
+API_VERSION_2020_09_01_PREVIEW = "2020-09-01-preview"
+API_VERSION_2020_09_01_DATAPLANE = "2020-09-01-dataplanepreview"
+ONLINE_ENDPOINT_TYPE = "online"
+BATCH_ENDPOINT_TYPE = "batch"
+BASE_PATH_CONTEXT_KEY = "base_path"
+SOURCE_PATH_CONTEXT_KEY = "source_path"
+PARAMS_OVERRIDE_KEY = "params_override"
+TYPE = "type"
+JOBLIMITSTYPE = "JobLimitsType"
+DATA_ARM_TYPE = "data"
+ARM_ID_PREFIX = "azureml:"
+PROMPTFLOW_AZUREML_OVERRIDE_KEY = "azureml"
+CURATED_ENV_PREFIX = "AzureML-"
+FILE_PREFIX = "file:"
+FOLDER_PREFIX = "folder:"
+HTTP_PREFIX = "http"
+HTTPS_PREFIX = "https"
+ARM_ID_FULL_PREFIX = "/subscriptions/"
+AZUREML_RESOURCE_PROVIDER = "Microsoft.MachineLearningServices"
+# ARM resource-id format strings; {} placeholders are filled via str.format.
+RESOURCE_ID_FORMAT = "/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}"
+NAMED_RESOURCE_ID_FORMAT = "/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/{}/{}"
+NAMED_RESOURCE_ID_FORMAT_WITH_PARENT = "/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/{}/{}/{}/{}"
+LEVEL_ONE_NAMED_RESOURCE_ID_FORMAT = "/subscriptions/{}/resourceGroups/{}/providers/{}/{}/{}"
+VERSIONED_RESOURCE_ID_FORMAT = "/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/{}/{}/versions/{}"
+LABELLED_RESOURCE_ID_FORMAT = "/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/{}/{}/labels/{}"
+DATASTORE_RESOURCE_ID = (
+    "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/datastores/{}"
+)
+PROVIDER_RESOURCE_ID_WITH_VERSION = (
+    "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/{}/{}/versions/{}"
+)
+SINGULARITY_ID_FORMAT = (
+    "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/virtualclusters/{}"
+)
+SINGULARITY_ID_REGEX_FORMAT = (
+    "/subscriptions/.*/resourceGroups/.*/providers/Microsoft.MachineLearningServices/virtualclusters/.*"
+)
+SINGULARITY_FULL_NAME_REGEX_FORMAT = (
+    "^(azureml:)?//subscriptions/(?P<subscription_id>[^/]+)/resourceGroups/(?P<resource_group_name>[^/]+)/"
+    "virtualclusters/(?P<name>[^/]+)"
+)
+SINGULARITY_SHORT_NAME_REGEX_FORMAT = "^(azureml:)?//virtualclusters/(?P<name>[^/]+)"
+ASSET_ID_FORMAT = "azureml://locations/{}/workspaces/{}/{}/{}/versions/{}"
+VERSIONED_RESOURCE_NAME = "{}:{}"
+LABELLED_RESOURCE_NAME = "{}@{}"
+LABEL_SPLITTER = "@"
+PYTHON = "python"
+AML_TOKEN_YAML = "aml_token"
+AAD_TOKEN_YAML = "aad_token"
+KEY = "key"
+AAD_TOKEN = "aadtoken"
+AAD_TOKEN_RESOURCE_ENDPOINT = "https://ml.azure.com"
+EMPTY_CREDENTIALS_ERROR = (
+    "Credentials unavailable. Initialize credentials using 'MLClient' for SDK or 'az login' for CLI."
+)
+DEFAULT_ARM_RETRY_INTERVAL = 60
+COMPONENT_TYPE = "type"
+TID_FMT = "&tid={}"
+# Feature-flag / behavior-toggle environment variable names.
+AZUREML_PRIVATE_FEATURES_ENV_VAR = "AZURE_ML_CLI_PRIVATE_FEATURES_ENABLED"
+AZUREML_INTERNAL_COMPONENTS_ENV_VAR = "AZURE_ML_INTERNAL_COMPONENTS_ENABLED"
+AZUREML_DISABLE_ON_DISK_CACHE_ENV_VAR = "AZURE_ML_DISABLE_ON_DISK_CACHE"
+AZUREML_COMPONENT_REGISTRATION_MAX_WORKERS = "AZURE_ML_COMPONENT_REGISTRATION_MAX_WORKERS"
+AZUREML_DISABLE_CONCURRENT_COMPONENT_REGISTRATION = "AZURE_ML_DISABLE_CONCURRENT_COMPONENT_REGISTRATION"
+AZUREML_INTERNAL_COMPONENTS_SCHEMA_PREFIX = "https://componentsdk.azureedge.net/jsonschema/"
+COMMON_RUNTIME_ENV_VAR = "AZUREML_COMPUTE_USE_COMMON_RUNTIME"
+ENDPOINT_DEPLOYMENT_START_MSG = (
+    "{}/#blade/HubsExtension/DeploymentDetailsBlade/overview/id/"
+    "%2Fsubscriptions%2F{}%2FresourceGroups%2F{}%2Fproviders%2FMicrosoft.Resources%2Fdeployments%2F{}\n"
+)
+AZUREML_LOCAL_ENDPOINTS_NOT_IMPLEMENTED_ERROR = "This operation for local endpoints is not supported yet."
+BATCH_JOB_NOT_SUPPORTED_ERROR_CODE = "BatchJobNotSupported"
+ENVIRONMENT_VARIABLES = "environment_variables"
+LIMITED_RESULTSET_WARNING_FORMAT = "Displaying top {} results from the list command."
+MAX_LIST_CLI_RESULTS = 50
+LOCAL_COMPUTE_TARGET = "local"
+LOCAL_COMPUTE_PROPERTY = "IsLocal"
+SERVERLESS_COMPUTE = "serverless"
+CONDA_FILE = "conda_file"
+DOCKER_FILE_NAME = "Dockerfile"
+COMPUTE_UPDATE_ERROR = (
+    "Only AmlCompute/KubernetesCompute cluster properties are supported, compute name {}, is {} type."
+)
+MAX_AUTOINCREMENT_ATTEMPTS = 3
+REGISTRY_URI_REGEX_FORMAT = "azureml://registries/*"
+REGISTRY_URI_FORMAT = "azureml://registries/"
+INTERNAL_REGISTRY_URI_FORMAT = "azureml://feeds/"
+REGISTRY_VERSION_PATTERN = "^azureml://registries/([^/]+)/([^/]+)/([^/]+)/versions/([^/]+)"
+REGISTRY_ASSET_ID = "azureml://registries/{}/{}/{}/versions/{}"
+# azureml:// datastore/job URI format strings and the regexes that parse them.
+SHORT_URI_FORMAT = "azureml://datastores/{}/paths/{}"
+DATASTORE_SHORT_URI = "azureml://datastores/"
+MLFLOW_URI_FORMAT = "runs:/{}/{}"
+JOB_URI_FORMAT = "azureml://jobs/{}/outputs/{}/paths/{}"
+LONG_URI_FORMAT = "azureml://subscriptions/{}/resourcegroups/{}/workspaces/{}/datastores/{}/paths/{}"
+SHORT_URI_REGEX_FORMAT = "azureml://datastores/([^/]+)/paths/(.+)"
+MLFLOW_URI_REGEX_FORMAT = "runs:/([^/?]+)/(.+)"
+AZUREML_REGEX_FORMAT = "azureml:([^/]+):(.+)"
+JOB_URI_REGEX_FORMAT = "azureml://jobs/([^/]+)/outputs/([^/]+)/paths/(.+)"
+OUTPUT_URI_REGEX_FORMAT = "azureml://datastores/([^/]+)/(ExperimentRun/.+)"
+LONG_URI_REGEX_FORMAT = (
+    "azureml://subscriptions/([^/]+)/resource[gG]roups/([^/]+)/workspaces/([^/]+)/datastores/([^/]+)/paths/(.+)"
+)
+ASSET_ARM_ID_REGEX_FORMAT = (
+    "azureml:/subscriptions/([^/]+)/resource[gG]roups/([^/]+)/"
+    "providers/Microsoft.MachineLearningServices/workspaces/([^/]+)/([^/]+)/([^/]+)/versions/(.+)"
+)
+ASSET_ID_REGEX_FORMAT = (
+    "azureml://subscriptions/([^/]+)/resource[gG]roups/([^/]+)/workspaces/([^/]+)/([^/]+)/([^/]+)/versions/(.+)"
+)
+ASSET_ID_RESOURCE_REGEX_FORMAT = "azureml://resource[gG]roups/([^/]+)/workspaces/([^/]+)/([^/]+)/([^/]+)/versions/(.+)"
+MODEL_ID_REGEX_FORMAT = "azureml://models/([^/]+)/versions/(.+)"
+DATA_ID_REGEX_FORMAT = "azureml://data/([^/]+)/versions/(.+)"
+ASSET_ID_URI_REGEX_FORMAT = "azureml://locations/([^/]+)/workspaces/([^/]+)/([^/]+)/([^/]+)/versions/(.+)"
+AZUREML_CLI_SYSTEM_EXECUTED_ENV_VAR = "AZUREML_CLI_SYSTEM_EXECUTED"
+# Templates/messages used when annotating docstrings of experimental APIs.
+DOCSTRING_TEMPLATE = ".. note:: {0} {1}\n\n"
+DOCSTRING_DEFAULT_INDENTATION = 8
+EXPERIMENTAL_CLASS_MESSAGE = "This is an experimental class,"
+EXPERIMENTAL_METHOD_MESSAGE = "This is an experimental method,"
+EXPERIMENTAL_FIELD_MESSAGE = "This is an experimental field,"
+EXPERIMENTAL_LINK_MESSAGE = (
+    "and may change at any time. Please see https://aka.ms/azuremlexperimental for more information."
+)
+REF_DOC_YAML_SCHEMA_ERROR_MSG_FORMAT = "\nVisit this link to refer to the {} schema if needed: {}."
+STORAGE_AUTH_MISMATCH_ERROR = "AuthorizationPermissionMismatch"
+SWEEP_JOB_BEST_CHILD_RUN_ID_PROPERTY_NAME = "best_child_run_id"
+BATCH_JOB_CHILD_RUN_OUTPUT_NAME = "score"
+DEFAULT_ARTIFACT_STORE_OUTPUT_NAME = "default"
+DEFAULT_EXPERIMENT_NAME = "Default"
+
+CREATE_ENVIRONMENT_ERROR_MESSAGE = (
+    "It looks like you are trying to specify a conda file for the --file/-f argument. "
+    "--file/-f is reserved for the Azure ML Environment definition (see schema here: {}). "
+    "To specify a conda file via command-line argument, please use --conda-file/-c argument."
+)
+ANONYMOUS_ENV_NAME = "CliV2AnonymousEnvironment"
+SKIP_VALIDATION_MESSAGE = "To skip this validation use the --skip-validation param"
+MLTABLE_METADATA_SCHEMA_URL_FALLBACK = "https://azuremlschemasprod.azureedge.net/latest/MLTable.schema.json"
+INVOCATION_BAT_FILE = "Invocation.bat"
+INVOCATION_BASH_FILE = "Invocation.sh"
+AZUREML_RUN_SETUP_DIR = "azureml-setup"
+AZUREML_RUNS_DIR = "azureml_runs"
+EXECUTION_SERVICE_URL_KEY = "&fake="
+LOCAL_JOB_FAILURE_MSG = "Failed to start local executable job.\n Detailed message: {}"
+# Storage endpoint URL templates keyed by datastore type; placeholders appear to
+# be (account name, endpoint suffix) — confirm against the caller that formats them.
+STORAGE_ACCOUNT_URLS = {
+    "AzureBlob": "https://{}.blob.{}",
+    "AzureDataLakeGen2": "https://{}.dfs.{}",
+    "AzureFile": "https://{}.file.{}",
+}
+DEFAULT_STORAGE_CONNECTION_NAME = "workspaceblobstore"
+
+DEFAULT_LABEL_NAME = "default"
+DEFAULT_COMPONENT_VERSION = "azureml_default"
+ANONYMOUS_COMPONENT_NAME = "azureml_anonymous"
+GIT_PATH_PREFIX = "git+"
+# Template for schema-validation error output; {text_color}/{link_color}/{reset}
+# are filled with terminal color codes by the formatter.
+SCHEMA_VALIDATION_ERROR_TEMPLATE = (
+    "\n{text_color}{description}\n{error_msg}{reset}\n\n"
+    "Details: {parsed_error_details}\n"
+    "Resolutions: {resolutions}"
+    "If using the CLI, you can also check the full log in debug mode for more details by adding --debug "
+    "to the end of your command\n"
+    "\nAdditional Resources: The easiest way to author a yaml specification file is using IntelliSense and "
+    "auto-completion Azure ML VS code extension provides: "
+    "{link_color}https://code.visualstudio.com/docs/datascience/azure-machine-learning.{reset} "
+    "To set up VS Code, visit {link_color}https://learn.microsoft.com/azure/machine-learning/how-to-setup-vs-"
+    "code{reset}\n"
+)
+
+YAML_CREATION_ERROR_DESCRIPTION = (
+    "The yaml file you provided does not match the prescribed schema "
+    "for {entity_type} yaml files and/or has the following issues:\n"
+)
+DATASTORE_SCHEMA_TYPES = [
+    "AzureFileSchema",
+    "AzureBlobSchema",
+    "AzureDataLakeGen2Schema",
+    "AzureStorageSchema",
+    "AzureDataLakeGen1Schema",
+]
+LOCAL_PATH = "local_path"
+SPARK_ENVIRONMENT_WARNING_MESSAGE = (
+    "Spark job will only install the packages defined in the Conda configuration. It "
+    "will not create a docker container using the image defined in the environment."
+)
+CONNECTION_API_VERSION_KEY = "ApiVersion"
+CONNECTION_API_TYPE_KEY = "ApiType"
+CONNECTION_KIND_KEY = "Kind"
+CONNECTION_CONTAINER_NAME_KEY = "ContainerName"
+CONNECTION_ACCOUNT_NAME_KEY = "AccountName"
+CONNECTION_RESOURCE_ID_KEY = "ResourceId"
+
+# Deprecated tag keys that cause workspace patch operations to fail.
+# Patch operations are used by the workspace begin_update operation,
+# but not begin_create_or_update. Once the former is replaced with the
+# latter, we can remove this list.
+WORKSPACE_PATCH_REJECTED_KEYS = ["AttachKeyVaultToWorkspace", "AttachAppInsightsToWorkspace"]
+
+
+class WorkspaceDiscoveryUrlKey(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Enum of the URL-type keys returned from querying a workspace's discovery URL."""
+
+    API = "api"
+    CATALOG = "catalog"
+    EXPERIMENTATION = "experimentation"
+    GALLERY = "gallery"
+    HISTORY = "history"
+    HYPERDRIVE = "hyperdrive"
+    LABELING = "labeling"
+    MODEL_MANAGEMENT = "modelmanagement"
+    PIPELINES = "pipelines"
+    STUDIO = "studio"
+
+
+class DefaultOpenEncoding:
+    """Holds the SDK's default values for the encoding param of open(...)."""
+
+    READ = "utf-8-sig"
+    """SDK Default Encoding when reading a file. utf-8-sig transparently skips a leading UTF-8 BOM if present."""
+    WRITE = "utf-8"
+    """SDK Default Encoding when writing a file"""
+
+
+class AzureMLResourceType:
+    """AzureMLResourceType is a class that defines the resource types that are supported by the SDK/CLI."""
+
+    CODE = "codes"
+    """Code resource type."""
+    COMPUTE = "computes"
+    """Compute resource type."""
+    DATA = "data"
+    """Data resource type."""
+    DATASTORE = "datastores"
+    """Datastore resource type."""
+    ONLINE_ENDPOINT = "online_endpoints"
+    """Online endpoint resource type."""
+    BATCH_ENDPOINT = "batch_endpoints"
+    """Batch endpoint resource type."""
+    ONLINE_DEPLOYMENT = "online_deployments"
+    """Online deployment resource type."""
+    DEPLOYMENT = "deployments"
+    """Deployment resource type."""
+    BATCH_DEPLOYMENT = "batch_deployments"
+    """Batch deployment resource type."""
+    ENVIRONMENT = "environments"
+    """Environment resource type."""
+    JOB = "jobs"
+    """Job resource type."""
+    MODEL = "models"
+    """Model resource type."""
+    VIRTUALCLUSTER = "virtualclusters"
+    """Virtual cluster resource type."""
+    WORKSPACE = "workspaces"
+    """Workspace resource type."""
+    CAPABILITY_HOST = "capability_hosts"
+    """Capability_Host resource type."""
+    CONNECTION = "connections"
+    """connection resource type."""
+    COMPONENT = "components"
+    """Component resource type."""
+    SCHEDULE = "schedules"
+    """Schedule resource type."""
+    REGISTRY = "registries"
+    """Registry resource type."""
+    # NOTE(review): CONNECTIONS duplicates CONNECTION above (both "connections");
+    # presumably kept as a backward-compatible alias — confirm before removing.
+    CONNECTIONS = "connections"
+    """Connections resource type."""
+    FEATURE_SET = "feature_sets"
+    """Feature set resource type."""
+    FEATURE_STORE_ENTITY = "feature_store_entities"
+    """Feature store entity resource type."""
+    FEATURE_STORE = "feature_store"
+    """Feature store resource type."""
+    HUB = "hub"
+    """Hub resource type."""
+    PROJECT = "project"
+    """Project resource type."""
+    SERVERLESS_ENDPOINT = "serverless_endpoints"
+    """Serverless endpoint resource type."""
+    MARKETPLACE_SUBSCRIPTION = "marketplace_subscriptions"
+    """Marketplace subscription resource type."""
+    INDEX = "indexes"
+    """Index resource type."""
+
+    # Resource types addressed by name alone.
+    NAMED_TYPES = {
+        JOB,
+        COMPUTE,
+        WORKSPACE,
+        ONLINE_ENDPOINT,
+        ONLINE_DEPLOYMENT,
+        DATASTORE,
+        SCHEDULE,
+    }
+    # Resource types addressed by name plus a version.
+    VERSIONED_TYPES = {MODEL, DATA, CODE, ENVIRONMENT, COMPONENT, FEATURE_SET, FEATURE_STORE_ENTITY, INDEX}
+
+
+class ArmConstants:
+    """ArmConstants is a class that defines the constants used by the SDK/CLI for ARM operations.
+
+    ArmConstants are used to define the names of the parameters that are used in the ARM templates that are used by the
+    SDK/CLI.
+    """
+
+    # ARM template parameter names.
+    CODE_PARAMETER_NAME = "codes"
+    CODE_VERSION_PARAMETER_NAME = "codeVersions"
+    MODEL_PARAMETER_NAME = "models"
+    MODEL_VERSION_PARAMETER_NAME = "modelVersions"
+    ENVIRONMENT_PARAMETER_NAME = "environments"
+    WORKSPACE_PARAMETER_NAME = "workspaceName"
+    LOCATION_PARAMETER_NAME = "location"
+    ENDPOINT_IDENTITY_PARAMETER_NAME = "onlineEndpointIdentity"
+    ENDPOINT_PARAMETER_NAME = "onlineEndpoint"
+    ENDPOINT_PROPERTIES_PARAMETER_NAME = "onlineEndpointProperties"
+    ENDPOINT_PROPERTIES_TRAFFIC_UPDATE_PARAMETER_NAME = "onlineEndpointPropertiesTrafficUpdate"
+    ENDPOINT_NAME_PARAMETER_NAME = "onlineEndpointName"
+    ENDPOINT_TAGS_PARAMETER_NAME = "onlineEndpointTags"
+    DEPLOYMENTS_PARAMETER_NAME = "onlineDeployments"
+    PROPERTIES_PARAMETER_NAME = "properties"
+    DEPENDSON_PARAMETER_NAME = "dependsOn"
+    TRAFFIC_PARAMETER_NAME = "trafficRules"
+    CODE_RESOURCE_NAME = "codeDeploymentCopy"
+    CODE_VERSION_RESOURCE_NAME = "codeVersionDeploymentCopy"
+    MODEL_RESOURCE_NAME = "modelDeploymentCopy"
+    MODEL_VERSION_RESOURCE_NAME = "modelVersionDeploymentCopy"
+    ENVIRONMENT_VERSION_RESOURCE_NAME = "environmentVersionDeploymentCopy"
+    ONLINE_DEPLOYMENT_RESOURCE_NAME = "onlineDeploymentCopy"
+    ONLINE_ENDPOINT_RESOURCE_NAME = "onlineEndpointCopy"
+    UPDATE_RESOURCE_NAME = "updateEndpointWithTraffic"
+    ENDPOINT_CREATE_OR_UPDATE_PARAMETER_NAME = "endpointCreateOrUpdate"
+    TAGS = "tags"
+    SKU = "sku"
+    KEY_VAULT_PARAMETER_NAME = "vaults"
+    STORAGE_ACCOUNT_PARAMETER_NAME = "storageAccounts"
+    APP_INSIGHTS_PARAMETER_NAME = "components"
+    CONTAINER_REGISTRY_PARAMETER_NAME = "registries"
+    USER_ASSIGNED_IDENTITIES = "userAssignedIdentities"
+
+    # Template/type discriminators used to pick which ARM template to deploy.
+    CODE_TYPE = "code"
+    CODE_VERSION_TYPE = "code_version"
+    MODEL_TYPE = "model"
+    MODEL_VERSION_TYPE = "model_version"
+    ENVIRONMENT_TYPE = "environment"
+    ENVIRONMENT_VERSION_TYPE = "environment_version"
+    ONLINE_ENDPOINT_TYPE = "online_endpoint"
+    MULTIPLE_ENDPOINTS_TYPE = "endpoints"
+    ONLINE_DEPLOYMENT_TYPE = "online_deployment"
+    UPDATE_ONLINE_ENDPOINT_TYPE = "update_online_endpoint"
+    BASE_TYPE = "base"
+    WORKSPACE_BASE = "workspace_base"
+    WORKSPACE_PARAM = "workspace_param"
+    ROLE_ASSIGNMENTS = "roleAssignments"
+    FEATURE_STORE_ROLE_ASSIGNMENTS = "feature_store_role_assignments"
+    FEATURE_STORE_ROLE_ASSIGNMENTS_PARAM = "feature_store_role_assignments_param"
+    WORKSPACE_PROJECT = "workspace_project"
+
+    # Generic template field names and JSON value kinds.
+    OPERATION_CREATE = "create"
+    OPERATION_UPDATE = "update"
+    NAME = "name"
+    VERSION = "version"
+    ASSET_PATH = "assetPath"
+    DATASTORE_ID = "datastoreId"
+    OBJECT = "Object"
+    ARRAY = "Array"
+    STRING = "String"
+    DEFAULT_VALUE = "defaultValue"
+
+    # Dependent-resource kind names.
+    STORAGE = "StorageAccount"
+    STORAGE_CONTAINER = "StorageContainer"
+    KEY_VAULT = "KeyVault"
+    APP_INSIGHTS = "AppInsights"
+    LOG_ANALYTICS = "LogAnalytics"
+    WORKSPACE = "Workspace"
+    CONTAINER_REGISTRY = "ContainerRegistry"
+
+    # Azure management-plane API versions used for dependent resources.
+    AZURE_MGMT_RESOURCE_API_VERSION = "2020-06-01"
+    AZURE_MGMT_STORAGE_API_VERSION = "2019-06-01"
+    AZURE_MGMT_APPINSIGHT_API_VERSION = "2015-05-01"
+    AZURE_MGMT_LOGANALYTICS_API_VERSION = "2015-03-20"
+    AZURE_MGMT_KEYVAULT_API_VERSION = "2019-09-01"
+    AZURE_MGMT_CONTAINER_REG_API_VERSION = "2019-05-01"
+
+    # Cloud-metadata and registry-discovery endpoint configuration.
+    DEFAULT_URL = "https://management.azure.com/metadata/endpoints?api-version=2019-05-01"
+    METADATA_URL_ENV_NAME = "ARM_CLOUD_METADATA_URL"
+    REGISTRY_DISCOVERY_DEFAULT_REGION = "west"
+    REGISTRY_DISCOVERY_REGION_ENV_NAME = "REGISTRY_DISCOVERY_ENDPOINT_REGION"
+    REGISTRY_ENV_URL = "REGISTRY_DISCOVERY_ENDPOINT_URL"
+
+
+class HttpResponseStatusCode:
+    """HTTP response status codes referenced by the SDK."""
+
+    NOT_FOUND = 404
+    """Not found."""
+
+
+class OperationStatus:
+    """Operation status class.
+
+    Operation status is used to indicate the status of an operation. It can be one of the following values: Succeeded,
+    Failed, Canceled, Running.
+    """
+
+    # Succeeded/Failed/Canceled are terminal states; Running is in-progress.
+    SUCCEEDED = "Succeeded"
+    """Succeeded."""
+    FAILED = "Failed"
+    """Failed."""
+    CANCELED = "Canceled"
+    """Canceled."""
+    RUNNING = "Running"
+    """Running."""
+
+
+class CommonYamlFields:
+    """Common yaml fields.
+
+    Common yaml fields are used to define the common fields in yaml files. It can be one of the following values: type,
+    name, $schema, kind.
+    """
+
+    TYPE = "type"
+    """Type."""
+    NAME = "name"
+    """Name."""
+    SCHEMA = "$schema"
+    """Schema."""
+    KIND = "kind"  # Kind discriminator field.
+
+
+class SchemaUrl:
+    """Schema urls.
+
+    Schema urls will be used in VSCode extension to validate yaml files. It will also be used to identify the
+    corresponding entity type of a yaml file, especially for some internal yaml files.
+    """
+
+    # Prompt flow schemas share a common URL prefix.
+    PROMPTFLOW_PREFIX = "https://azuremlschemas.azureedge.net/promptflow/"
+    PROMPTFLOW_FLOW = PROMPTFLOW_PREFIX + "latest/Flow.schema.json"
+    PROMPTFLOW_RUN = PROMPTFLOW_PREFIX + "latest/Run.schema.json"
+
+
+class GitProperties:
+    """GitProperties is a class that defines the constants used by the SDK/CLI for Git operations.
+
+    GitProperties are used to define the names of the properties that are used in the Git operations that are used by
+    the SDK/CLI. These properties are used to set the Git properties in the run history.
+    """
+
+    # Environment variable names carrying Git metadata.
+    ENV_REPOSITORY_URI = "AZUREML_GIT_REPOSITORY_URI"
+    ENV_BRANCH = "AZUREML_GIT_BRANCH"
+    ENV_COMMIT = "AZUREML_GIT_COMMIT"
+    ENV_DIRTY = "AZUREML_GIT_DIRTY"
+    ENV_BUILD_ID = "AZUREML_GIT_BUILD_ID"
+    ENV_BUILD_URI = "AZUREML_GIT_BUILD_URI"
+
+    # Run-history property keys.
+    PROP_DIRTY = "azureml.git.dirty"
+    PROP_BUILD_ID = "azureml.git.build_id"
+    PROP_BUILD_URI = "azureml.git.build_uri"
+
+    # MLflow source-tracking tag keys.
+    PROP_MLFLOW_GIT_BRANCH = "mlflow.source.git.branch"
+    PROP_MLFLOW_GIT_COMMIT = "mlflow.source.git.commit"
+    PROP_MLFLOW_GIT_REPO_URL = "mlflow.source.git.repoURL"
+
+
+class LROConfigurations:
+    """LRO configurations class.
+
+    LRO configurations are used to define the configurations for long running operations. It can be one of the following
+    values: MAX_WAIT_COUNT, POLLING_TIMEOUT, POLL_INTERVAL, SLEEP_TIME.
+    """
+
+    # NOTE(review): units are not stated here — timeout/interval/sleep values are
+    # presumably seconds and MAX_WAIT_COUNT an iteration count; confirm at call sites.
+    MAX_WAIT_COUNT = 400
+    """Max wait count."""
+    POLLING_TIMEOUT = 720
+    """Polling timeout."""
+    POLL_INTERVAL = 5
+    """Poll interval."""
+    SLEEP_TIME = 5
+    """Sleep time."""
+
+
+class OrderString:
+    """Order string class.
+
+    Order string is used to define the order-by string for list operations. It can be one of the following values:
+    CREATED_AT, CREATED_AT_DESC.
+    """
+
+    CREATED_AT = "createdtime asc"
+    """Created at, ascending."""
+    CREATED_AT_DESC = "createdtime desc"
+    """Created at, descending."""
+
+
+class YAMLRefDocLinks:
+    """YAML reference document links.
+
+    YAML reference document links are used to define the reference document links for yaml files.
+    Each value is an aka.ms shortlink to the CLI v2 YAML schema reference for that entity.
+    """
+
+    WORKSPACE = "https://aka.ms/ml-cli-v2-workspace-yaml-reference"
+    ENVIRONMENT = "https://aka.ms/ml-cli-v2-environment-yaml-reference"
+    DATA = "https://aka.ms/ml-cli-v2-data-yaml-reference"
+    MODEL = "https://aka.ms/ml-cli-v2-model-yaml-reference"
+    AML_COMPUTE = "https://aka.ms/ml-cli-v2-compute-aml-yaml-reference"
+    COMPUTE_INSTANCE = "https://aka.ms/ml-cli-v2-compute-instance-yaml-reference"
+    VIRTUAL_MACHINE_COMPUTE = "https://aka.ms/ml-cli-v2-compute-vm-yaml-reference"
+    COMMAND_JOB = "https://aka.ms/ml-cli-v2-job-command-yaml-reference"
+    PARALLEL_JOB = "https://aka.ms/ml-cli-v2-job-parallel-yaml-reference"
+    SWEEP_JOB = "https://aka.ms/ml-cli-v2-job-sweep-yaml-reference"
+    PIPELINE_JOB = "https://aka.ms/ml-cli-v2-job-pipeline-yaml-reference"
+    DATASTORE_BLOB = "https://aka.ms/ml-cli-v2-datastore-blob-yaml-reference"
+    DATASTORE_FILE = "https://aka.ms/ml-cli-v2-datastore-file-yaml-reference"
+    DATASTORE_DATA_LAKE_GEN_1 = "https://aka.ms/ml-cli-v2-datastore-data-lake-gen1-yaml-reference"
+    DATASTORE_DATA_LAKE_GEN_2 = "https://aka.ms/ml-cli-v2-datastore-data-lake-gen2-yaml-reference"
+    ONLINE_ENDPOINT = "https://aka.ms/ml-cli-v2-endpoint-online-yaml-reference"
+    BATCH_ENDPOINT = "https://aka.ms/ml-cli-v2-endpoint-batch-yaml-reference"
+    MANAGED_ONLINE_DEPLOYMENT = "https://aka.ms/ml-cli-v2-deployment-managed-online-yaml-reference"
+    KUBERNETES_ONLINE_DEPLOYMENT = "https://aka.ms/ml-cli-v2-deployment-kubernetes-online-yaml-reference"
+    BATCH_DEPLOYMENT = "https://aka.ms/ml-cli-v2-deployment-batch-yaml-reference"
+    COMMAND_COMPONENT = "https://aka.ms/ml-cli-v2-component-command-yaml-reference"
+    PARALLEL_COMPONENT = "https://aka.ms/ml-cli-v2-component-parallel-yaml-reference"
+    JOB_SCHEDULE = "https://aka.ms/ml-cli-v2-schedule-yaml-reference"
+    REGISTRY = "https://aka.ms/ml-cli-v2-registry-yaml-reference"
+    FEATURE_STORE = "https://aka.ms/ml-cli-v2-featurestore-yaml-reference"
+    FEATURE_SET = "https://aka.ms/ml-cli-v2-featureset-yaml-reference"
+    FEATURE_STORE_ENTITY = "https://aka.ms/ml-cli-v2-featurestore-entity-yaml-reference"
+    HUB = "https://aka.ms/ml-cli-v2-workspace-hub-entity-yaml-reference"
+    CAPABILITY_HOST = "https://aka.ms/ml-cli-v2-capability-host-yaml-reference"
+
+
+class YAMLRefDocSchemaNames:
+    """YAML reference document schema names.
+
+    YAML reference document schema names are used to define the reference document schema names for yaml files.
+    Attribute names largely parallel the link constants in YAMLRefDocLinks.
+    """
+
+    WORKSPACE = "Workspace"
+    """Workspace."""
+    ENVIRONMENT = "Environment"
+    """Environment."""
+    DATA = "Data"
+    """Data."""
+    MODEL = "Model"
+    """Model."""
+    AML_COMPUTE = "AMLCompute"
+    """AML compute."""
+    COMPUTE_INSTANCE = "ComputeInstance"
+    """Compute instance."""
+    VIRTUAL_MACHINE_COMPUTE = "VirtualMachineCompute"
+    """Virtual machine compute."""
+    COMMAND_JOB = "CommandJob"
+    """Command job."""
+    SWEEP_JOB = "SweepJob"
+    """Sweep job."""
+    PARALLEL_JOB = "ParallelJob"
+    """Parallel job."""
+    PIPELINE_JOB = "PipelineJob"
+    """Pipeline job."""
+    DATASTORE_BLOB = "AzureBlobDatastore"
+    """Azure blob datastore."""
+    DATASTORE_FILE = "AzureFileDatastore"
+    """Azure file datastore."""
+    DATASTORE_DATA_LAKE_GEN_1 = "AzureDataLakeGen1Datastore"
+    """Azure data lake gen 1 datastore."""
+    DATASTORE_DATA_LAKE_GEN_2 = "AzureDataLakeGen2Datastore"
+    """Azure data lake gen 2 datastore."""
+    ONLINE_ENDPOINT = "OnlineEndpoint"
+    """Online endpoint."""
+    BATCH_ENDPOINT = "BatchEndpoint"
+    """Batch endpoint."""
+    MANAGED_ONLINE_DEPLOYMENT = "ManagedOnlineDeployment"
+    """Managed online deployment."""
+    KUBERNETES_ONLINE_DEPLOYMENT = "KubernetesOnlineDeployment"
+    """Kubernetes online deployment."""
+    BATCH_DEPLOYMENT = "BatchDeployment"
+    """Batch deployment."""
+    COMMAND_COMPONENT = "CommandComponent"
+    """Command component."""
+    PARALLEL_COMPONENT = "ParallelComponent"
+    """Parallel component."""
+    JOB_SCHEDULE = "JobSchedule"
+    """Job Schedule."""
+    CAPABILITY_HOST = "CapabilityHost"
+    """Capability host."""
+
+
+class DockerTypes:
+    """Docker types accepted by the SDK/CLI.
+
+    Docker types are used to define the docker types accepted by the SDK/CLI: a prebuilt image, or a context build.
+    """
+
+    IMAGE = "Image"
+    """Image."""
+    BUILD = "Build"
+    """Build."""
+
+
+class DataType:
+    """Data types that a job or compute instance schedule accepts.
+
+    The supported data types are: Simple and Dataflow.
+    """
+
+    SIMPLE = "Simple"
+    """Simple data type."""
+    DATAFLOW = "Dataflow"
+    """Dataflow data type."""
+
+
+class LoggingLevel:
+    """Logging levels that a job or compute instance schedule accepts.
+
+    Logging levels are case-insensitive. For example, "WARNING" and "warning" are both valid. The supported logging
+    levels are: warning, info, and debug.
+    """
+
+    # Note: the attribute is named WARN but its wire value is "WARNING".
+    WARN = "WARNING"
+    INFO = "INFO"
+    DEBUG = "DEBUG"
+
+
+class TimeZone(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Time zones that a job or compute instance schedule accepts."""
+
+ DATELINE_STANDARD_TIME = "Dateline Standard Time"
+ UTC_11 = "UTC-11"
+ ALEUTIAN_STANDARD_TIME = "Aleutian Standard Time"
+ HAWAIIAN_STANDARD_TIME = "Hawaiian Standard Time"
+ MARQUESAS_STANDARD_TIME = "Marquesas Standard Time"
+ ALASKAN_STANDARD_TIME = "Alaskan Standard Time"
+ UTC_09 = "UTC-09"
+ PACIFIC_STANDARD_TIME_MEXICO = "Pacific Standard Time (Mexico)"
+ UTC_08 = "UTC-08"
+ PACIFIC_STANDARD_TIME = "Pacific Standard Time"
+ US_MOUNTAIN_STANDARD_TIME = "US Mountain Standard Time"
+ MOUNTAIN_STANDARD_TIME_MEXICO = "Mountain Standard Time (Mexico)"
+ MOUNTAIN_STANDARD_TIME = "Mountain Standard Time"
+ CENTRAL_AMERICA_STANDARD_TIME = "Central America Standard Time"
+ CENTRAL_STANDARD_TIME = "Central Standard Time"
+ EASTER_ISLAND_STANDARD_TIME = "Easter Island Standard Time"
+ CENTRAL_STANDARD_TIME_MEXICO = "Central Standard Time (Mexico)"
+ CANADA_CENTRAL_STANDARD_TIME = "Canada Central Standard Time"
+ SA_PACIFIC_STANDARD_TIME = "SA Pacific Standard Time"
+ EASTERN_STANDARD_TIME_MEXICO = "Eastern Standard Time (Mexico)"
+ EASTERN_STANDARD_TIME = "Eastern Standard Time"
+ HAITI_STANDARD_TIME = "Haiti Standard Time"
+ CUBA_STANDARD_TIME = "Cuba Standard Time"
+ US_EASTERN_STANDARD_TIME = "US Eastern Standard Time"
+ PARAGUAY_STANDARD_TIME = "Paraguay Standard Time"
+ ATLANTIC_STANDARD_TIME = "Atlantic Standard Time"
+ VENEZUELA_STANDARD_TIME = "Venezuela Standard Time"
+ CENTRAL_BRAZILIAN_STANDARD_TIME = "Central Brazilian Standard Time"
+ SA_WESTERN_STANDARD_TIME = "SA Western Standard Time"
+ PACIFIC_SA_STANDARD_TIME = "Pacific SA Standard Time"
+ TURKS_AND_CAICOS_STANDARD_TIME = "Turks And Caicos Standard Time"
+ NEWFOUNDLAND_STANDARD_TIME = "Newfoundland Standard Time"
+ TOCANTINS_STANDARD_TIME = "Tocantins Standard Time"
+ E_SOUTH_AMERICAN_STANDARD_TIME = "E. South America Standard Time"
+ SA_EASTERN_STANDARD_TIME = "SA Eastern Standard Time"
+ ARGENTINA_STANDARD_TIME = "Argentina Standard Time"
+ GREENLAND_STANDARD_TIME = "Greenland Standard Time"
+ MONTEVIDEO_STANDARD_TIME = "Montevideo Standard Time"
+ SAINT_PIERRE_STANDARD_TIME = "Saint Pierre Standard Time"
+ BAHIA_STANDARD_TIME = "Bahia Standard Time"
+ UTC_02 = "UTC-02"
+ MID_ATLANTIC_STANDARD_TIME = "Mid-Atlantic Standard Time"
+ AZORES_STANDARD_TIME = "Azores Standard Time"
+ CAPE_VERDE_STANDARD_TIME = "Cape Verde Standard Time"
+ UTC = "UTC"
+ MOROCCO_STANDARD_TIME = "Morocco Standard Time"
+ GMT_STANDARD_TIME = "GMT Standard Time"
+ GREENWICH_STANDARD_TIME = "Greenwich Standard Time"
+ W_EUROPE_STANDARD_TIME = "W. Europe Standard Time"
+ CENTRAL_EUROPE_STANDARD_TIME = "Central Europe Standard Time"
+ ROMANCE_STANDARD_TIME = "Romance Standard Time"
+ CENTRAL_EUROPEAN_STANDARD_TIME = "Central European Standard Time"
+ W_CENTEAL_AFRICA_STANDARD_TIME = "W. Central Africa Standard Time"
+ NAMIBIA_STANDARD_TIME = "Namibia Standard Time"
+ JORDAN_STANDARD_TIME = "Jordan Standard Time"
+ GTB_STANDARD_TIME = "GTB Standard Time"
+ MIDDLE_EAST_STANDARD_TIME = "Middle East Standard Time"
+ EGYPT_STANDARD_TIME = "Egypt Standard Time"
+ E_EUROPE_STANDARD_TIME = "E. Europe Standard Time"
+ SYRIA_STANDARD_TIME = "Syria Standard Time"
+ WEST_BANK_STANDARD_TIME = "West Bank Standard Time"
+ SOUTH_AFRICA_STANDARD_TIME = "South Africa Standard Time"
+ FLE_STANDARD_TIME = "FLE Standard Time"
+ TURKEY_STANDARD_TIME = "Turkey Standard Time"
+ ISRAEL_STANDARD_TIME = "Israel Standard Time"
+ KALININGRAD_STANDARD_TIME = "Kaliningrad Standard Time"
+ LIBYA_STANDARD_TIME = "Libya Standard Time"
+ ARABIC_STANDARD_TIME = "Arabic Standard Time"
+ ARAB_STANDARD_TIME = "Arab Standard Time"
+ BELARUS_STANDARD_TIME = "Belarus Standard Time"
+ RUSSIAN_STANDARD_TIME = "Russian Standard Time"
+ E_AFRICA_STANDARD_TIME = "E. Africa Standard Time"
+ IRAN_STANDARD_TIME = "Iran Standard Time"
+ ARABIAN_STANDARD_TIME = "Arabian Standard Time"
+ ASTRAKHAN_STANDARD_TIME = "Astrakhan Standard Time"
+ AZERBAIJAN_STANDARD_TIME = "Azerbaijan Standard Time"
+ RUSSIA_TIME_ZONE_3 = "Russia Time Zone 3"
+ MAURITIUS_STANDARD_TIME = "Mauritius Standard Time"
+ GEORGIAN_STANDARD_TIME = "Georgian Standard Time"
+ CAUCASUS_STANDARD_TIME = "Caucasus Standard Time"
+ AFGHANISTANA_STANDARD_TIME = "Afghanistan Standard Time"
+ WEST_ASIA_STANDARD_TIME = "West Asia Standard Time"
+ EKATERINBURG_STANDARD_TIME = "Ekaterinburg Standard Time"
+ PAKISTAN_STANDARD_TIME = "Pakistan Standard Time"
+ INDIA_STANDARD_TIME = "India Standard Time"
+ SRI_LANKA_STANDARD_TIME = "Sri Lanka Standard Time"
+ NEPAL_STANDARD_TIME = "Nepal Standard Time"
+ CENTRAL_ASIA_STANDARD_TIME = "Central Asia Standard Time"
+ BANGLADESH_STANDARD_TIME = "Bangladesh Standard Time"
+ N_CENTRAL_ASIA_STANDARD_TIME = "N. Central Asia Standard Time"
+ MYANMAR_STANDARD_TIME = "Myanmar Standard Time"
+ SE_ASIA_STANDARD_TIME = "SE Asia Standard Time"
+ ALTAI_STANDARD_TIME = "Altai Standard Time"
+ W_MONGOLIA_STANDARD_TIME = "W. Mongolia Standard Time"
+ NORTH_ASIA_STANDARD_TIME = "North Asia Standard Time"
+ TOMSK_STANDARD_TIME = "Tomsk Standard Time"
+ CHINA_STANDARD_TIME = "China Standard Time"
+ NORTH_ASIA_EAST_STANDARD_TIME = "North Asia East Standard Time"
+ SINGAPORE_STANDARD_TIME = "Singapore Standard Time"
+ W_AUSTRALIA_STANDARD_TIME = "W. Australia Standard Time"
+ TAIPEI_STANDARD_TIME = "Taipei Standard Time"
+ ULAANBAATAR_STANDARD_TIME = "Ulaanbaatar Standard Time"
+ NORTH_KOREA_STANDARD_TIME = "North Korea Standard Time"
+ AUS_CENTRAL_W_STANDARD_TIME = "Aus Central W. Standard Time"
+ TRANSBAIKAL_STANDARD_TIME = "Transbaikal Standard Time"
+ TOKYO_STANDARD_TIME = "Tokyo Standard Time"
+ KOREA_STANDARD_TIME = "Korea Standard Time"
+ YAKUTSK_STANDARD_TIME = "Yakutsk Standard Time"
+ CEN_AUSTRALIA_STANDARD_TIME = "Cen. Australia Standard Time"
+ AUS_CENTRAL_STANDARD_TIME = "AUS Central Standard Time"
+ E_AUSTRALIAN_STANDARD_TIME = "E. Australia Standard Time"
+ AUS_EASTERN_STANDARD_TIME = "AUS Eastern Standard Time"
+ WEST_PACIFIC_STANDARD_TIME = "West Pacific Standard Time"
+ TASMANIA_STANDARD_TIME = "Tasmania Standard Time"
+ VLADIVOSTOK_STANDARD_TIME = "Vladivostok Standard Time"
+ LORD_HOWE_STANDARD_TIME = "Lord Howe Standard Time"
+ BOUGAINVILLE_STANDARD_TIME = "Bougainville Standard Time"
+ RUSSIA_TIME_ZONE_10 = "Russia Time Zone 10"
+ MAGADAN_STANDARD_TIME = "Magadan Standard Time"
+ NORFOLK_STANDARD_TIME = "Norfolk Standard Time"
+ SAKHALIN_STANDARD_TIME = "Sakhalin Standard Time"
+ CENTRAL_PACIFIC_STANDARD_TIME = "Central Pacific Standard Time"
+ RUSSIA_TIME_ZONE_11 = "Russia Time Zone 11"
+ NEW_ZEALAND_STANDARD_TIME = "New Zealand Standard Time"
+ UTC_12 = "UTC+12"
+ FIJI_STANDARD_TIME = "Fiji Standard Time"
+ KAMCHATKA_STANDARD_TIME = "Kamchatka Standard Time"
+ CHATHAM_ISLANDS_STANDARD_TIME = "Chatham Islands Standard Time"
+ TONGA__STANDARD_TIME = "Tonga Standard Time"
+ SAMOA_STANDARD_TIME = "Samoa Standard Time"
+ LINE_ISLANDS_STANDARD_TIME = "Line Islands Standard Time"
+
+
class AssetTypes:
    """String identifiers for the supported asset types.

    An asset may be a single file, a folder, an MLTable, or one of several
    model flavors (MLflow, Triton, or custom).
    """

    URI_FILE = "uri_file"
    """A single file referenced by URI."""
    URI_FOLDER = "uri_folder"
    """A folder referenced by URI."""
    MLTABLE = "mltable"
    """An MLTable asset."""
    MLFLOW_MODEL = "mlflow_model"
    """A model stored in the MLflow format."""
    TRITON_MODEL = "triton_model"
    """A model in the Triton format."""
    CUSTOM_MODEL = "custom_model"
    """A model in a user-defined format."""
+
+
class InputTypes:
    """String identifiers for the supported primitive input types."""

    INTEGER = "integer"
    """An integer-valued input."""
    NUMBER = "number"
    """A numeric (floating point) input."""
    STRING = "string"
    """A string-valued input."""
    BOOLEAN = "boolean"
    """A boolean-valued input."""
+
+
class WorkspaceResourceConstants:
    """Constant values describing the encryption status of a workspace."""

    ENCRYPTION_STATUS_ENABLED = "Enabled"
    """Encryption is enabled."""
+
+
class InputOutputModes:
    """String identifiers for the supported input/output access modes of a data asset."""

    MOUNT = "mount"
    """Mount mode."""
    DOWNLOAD = "download"
    """Download mode."""
    UPLOAD = "upload"
    """Upload mode."""
    RO_MOUNT = "ro_mount"
    """Read-only mount mode."""
    RW_MOUNT = "rw_mount"
    """Read-write mount mode."""
    EVAL_MOUNT = "eval_mount"
    """Evaluation mount mode."""
    EVAL_DOWNLOAD = "eval_download"
    """Evaluation download mode."""
    DIRECT = "direct"
    """Direct (pass-through) mode."""
+
+
class ConnectionTypes:
    """Names for connection types that are different from the underlying api enum values
    from the ConnectionCategory class."""

    CUSTOM = "custom"  # Corresponds to "custom_keys".
    AZURE_DATA_LAKE_GEN_2 = "azure_data_lake_gen2"  # Corresponds to "adls_gen2" (comment previously read "alds_gen2").
    AZURE_CONTENT_SAFETY = "azure_content_safety"  # Corresponds to "cognitive_service" with kind "content_safety".
    AZURE_SPEECH_SERVICES = "azure_speech_services"  # Corresponds to "cognitive_service" with kind "speech".
    AZURE_SEARCH = "azure_ai_search"  # Corresponds to "cognitive_search"
    AZURE_AI_SERVICES = "azure_ai_services"  # Corresponds to "ai_services"
    AI_SERVICES_REST_PLACEHOLDER = "AIServices"  # placeholder until REST enum "ai_services" is published.
+
+
class OneLakeArtifactTypes:
    """Names of the Fabric artifact types that form specific sub-types of MicrosoftOneLakeConnections."""

    # NOTE(review): value is "lake_house" while the constant is ONE_LAKE -- presumably
    # the lakehouse artifact kind; confirm against the OneLake connection schema.
    ONE_LAKE = "lake_house"
+
+
class CognitiveServiceKinds:
    """Subtypes for connections using the Cognitive service type. These
    values are plugged into the connection's metadata."""

    # NOTE(review): casing is intentionally inconsistent here ("Content Safety" vs
    # "speech") -- these are the literal metadata values the service expects; do not normalize.
    CONTENT_SAFETY = "Content Safety"
    SPEECH = "speech"
+
+
class LegacyAssetTypes:
    """String identifiers for asset types created through the legacy API."""

    PATH = "path"
    """A path-based legacy asset."""
+
+
class PublicNetworkAccess:
    """Values for the public network access setting of a workspace.

    When set to ``ENABLED``, all network traffic to the workspace is allowed;
    when set to ``DISABLED``, only traffic from the workspace's Azure Virtual
    Network is allowed.
    """

    ENABLED = "Enabled"
    """Public network access is allowed."""
    DISABLED = "Disabled"
    """Public network access is not allowed."""
+
+
class ModelType:
    """String identifiers for the model types accepted by the API.

    One of ``CustomModel``, ``MLFlowModel``, or ``TritonModel``.
    """

    CUSTOM = "CustomModel"
    """A model in a user-defined format."""
    MLFLOW = "MLFlowModel"
    """A model in the MLflow format."""
    TRITON = "TritonModel"
    """A model in the Triton format."""
+
+
class RollingRate:
    """Granularities at which data can be rolled: day, hour, or minute."""

    DAY = "day"
    """Roll once per day."""
    HOUR = "hour"
    """Roll once per hour."""
    MINUTE = "minute"
    """Roll once per minute."""
+
+
class Scope:
    """Scope levels at which an asset may live: subscription or resource group."""

    SUBSCRIPTION = "subscription"
    """The asset is scoped to a subscription."""
    RESOURCE_GROUP = "resource_group"
    """The asset is scoped to a resource group."""
+
+
class IdentityType:
    """Identity kinds a workspace job can run under.

    One of ``aml_token``, ``user_identity``, or ``managed_identity``.
    """

    AML_TOKEN = "aml_token"
    """Run using an AML token."""
    USER_IDENTITY = "user_identity"
    """Run using the submitting user's identity."""
    MANAGED_IDENTITY = "managed_identity"
    """Run using a managed identity."""
+
+
class Boolean:
    """Lower-case string literals used where a boolean is expressed as text."""

    TRUE = "true"
    """The string form of True."""
    FALSE = "false"
    """The string form of False."""
+
+
class InferenceServerType:
    """String identifiers for the supported inference server kinds."""

    AZUREML_ONLINE = "azureml_online"  # Azure ML managed online inference server
    AZUREML_BATCH = "azureml_batch"  # Azure ML batch inference server
    TRITON = "triton"  # Triton inference server
    CUSTOM = "custom"  # user-supplied inference server
+
+
class AzureDevopsArtifactsType:
    """Type identifiers for Azure DevOps artifacts."""

    ARTIFACT = "artifact"
+
+
class DataIndexTypes:
    """Names of the index kinds that a DataIndex job can write to."""

    ACS = "acs"
    """An Azure Cognitive Search index."""
    PINECONE = "pinecone"
    """A Pinecone index."""
    FAISS = "faiss"
    """A Faiss index."""
+
+
class IndexInputType:
    """An enumeration of values for the types of input data for an index."""

    GIT = "git"  # input sourced from a git repository
    LOCAL = "local"  # input sourced from the local filesystem
+
+
class ScheduleType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Kinds of resources a schedule can trigger; values compare case-insensitively."""

    JOB = "job"
    MONITOR = "monitor"
    DATA_IMPORT = "data_import"
+
+
class AutoDeleteCondition(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Conditions that trigger auto-deletion of an asset; values compare case-insensitively."""

    CREATED_GREATER_THAN = "created_greater_than"
    LAST_ACCESSED_GREATER_THAN = "last_accessed_greater_than"
+
+
class WorkspaceKind:
    """Enum of workspace categories."""

    DEFAULT = "default"  # a standard workspace
    HUB = "hub"  # an AI Studio hub workspace
    PROJECT = "project"  # a project workspace parented by a hub
    FEATURE_STORE = "featurestore"  # a feature-store workspace
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_component.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_component.py
new file mode 100644
index 00000000..c8752c68
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_component.py
@@ -0,0 +1,150 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import Dict
+
+DO_WHILE_MAX_ITERATION = 1000
+
+
class ComponentJobConstants(object):
    """Regex patterns and format strings used to bind component job inputs/outputs."""

    # Matches binding expressions like "${{parent.inputs.x}}" / "${{parent.jobs.step.outputs.y}}".
    INPUT_PATTERN = r"^\$\{\{parent\.(inputs|jobs)\.(.*?)\}\}$"
    # Matches binding expressions like "${{parent.outputs.x}}".
    OUTPUT_PATTERN = r"^\$\{\{parent\.outputs\.(.*?)\}\}$"
    # Legacy forms without the "parent." prefix.
    LEGACY_INPUT_PATTERN = r"^\$\{\{(inputs|jobs)\.(.*?)\}\}$"
    LEGACY_OUTPUT_PATTERN = r"^\$\{\{outputs\.(.*?)\}\}$"
    # Destination paths built as jobs.<node-name>.inputs/outputs.<port-name>.
    INPUT_DESTINATION_FORMAT = "jobs.{}.inputs.{}"
    OUTPUT_DESTINATION_FORMAT = "jobs.{}.outputs.{}"
+
+
class NodeType(object):
    """Type-string values identifying the kind of a pipeline node (component job)."""

    COMMAND = "command"
    SWEEP = "sweep"
    PARALLEL = "parallel"
    AUTOML = "automl"
    PIPELINE = "pipeline"
    IMPORT = "import"
    SPARK = "spark"
    DATA_TRANSFER = "data_transfer"
    FLOW_PARALLEL = "promptflow_parallel"
    # Note: container is not a real component type,
    # only used to mark component from container data.
    _CONTAINER = "_container"
+
+
class ControlFlowType(object):
    """Type-string values for pipeline control-flow nodes."""

    DO_WHILE = "do_while"
    IF_ELSE = "if_else"
    PARALLEL_FOR = "parallel_for"
+
+
+CONTROL_FLOW_TYPES = [getattr(ControlFlowType, k) for k in dir(ControlFlowType) if k.isupper()]
+
+
class DataTransferTaskType(object):
    """Task kinds supported by a data-transfer node."""

    COPY_DATA = "copy_data"
    IMPORT_DATA = "import_data"
    EXPORT_DATA = "export_data"
+
+
class DataCopyMode(object):
    """Conflict-handling modes for a copy_data data-transfer task."""

    MERGE_WITH_OVERWRITE = "merge_with_overwrite"
    FAIL_IF_CONFLICT = "fail_if_conflict"
+
+
class ExternalDataType(object):
    """Kinds of external data sources for import/export data-transfer tasks."""

    FILE_SYSTEM = "file_system"
    DATABASE = "database"
+
+
class DataTransferBuiltinComponentUri(object):
    """Registry URIs of the built-in data-transfer components (pinned to version 0.0.1)."""

    IMPORT_DATABASE = "azureml://registries/azureml/components/import_data_database/versions/0.0.1"
    IMPORT_FILE_SYSTEM = "azureml://registries/azureml/components/import_data_file_system/versions/0.0.1"
    EXPORT_DATABASE = "azureml://registries/azureml/components/export_data_database/versions/0.0.1"
+
+
class LLMRAGComponentUri:
    """Registry URIs of the built-in LLM RAG components (resolved via the "default" label)."""

    LLM_RAG_CRACK_AND_CHUNK = "azureml://registries/azureml/components/llm_rag_crack_and_chunk/labels/default"
    LLM_RAG_GENERATE_EMBEDDINGS = "azureml://registries/azureml/components/llm_rag_generate_embeddings/labels/default"
    LLM_RAG_CRACK_AND_CHUNK_AND_EMBED = (
        "azureml://registries/azureml/components/llm_rag_crack_and_chunk_and_embed/labels/default"
    )
    LLM_RAG_UPDATE_ACS_INDEX = "azureml://registries/azureml/components/llm_rag_update_acs_index/labels/default"
    LLM_RAG_UPDATE_PINECONE_INDEX = (
        "azureml://registries/azureml/components/llm_rag_update_pinecone_index/labels/default"
    )
    LLM_RAG_CREATE_FAISS_INDEX = "azureml://registries/azureml/components/llm_rag_create_faiss_index/labels/default"
    LLM_RAG_REGISTER_MLINDEX_ASSET = (
        "azureml://registries/azureml/components/llm_rag_register_mlindex_asset/labels/default"
    )
    LLM_RAG_VALIDATE_DEPLOYMENTS = "azureml://registries/azureml/components/llm_rag_validate_deployments/labels/default"
    LLM_RAG_CREATE_PROMPTFLOW = "azureml://registries/azureml/components/llm_rag_create_promptflow/labels/default"
+
+
class ComponentSource:
    """Indicate where the component is constructed."""

    BUILDER = "BUILDER"  # built via the command/parallel builder functions
    DSL = "DSL"  # built via the @dsl.pipeline decorator
    CLASS = "CLASS"  # constructed directly from an entity class
    REMOTE_WORKSPACE_JOB = "REMOTE.WORKSPACE.JOB"  # loaded from an existing workspace job
    REMOTE_WORKSPACE_COMPONENT = "REMOTE.WORKSPACE.COMPONENT"  # loaded from a registered workspace component
    REMOTE_REGISTRY = "REMOTE.REGISTRY"  # loaded from a registry
    YAML_JOB = "YAML.JOB"  # loaded from a job YAML file
    YAML_COMPONENT = "YAML.COMPONENT"  # loaded from a component YAML file
    BUILTIN = "BUILTIN"  # a built-in component
+
+
class ParallelTaskType:
    """Task kinds for a parallel job."""

    RUN_FUNCTION = "run_function"
    FUNCTION = "function"
    MODEL = "model"
+
+
class ComponentParameterTypes:
    """Primitive type names accepted for component parameters."""

    NUMBER = "number"
    INTEGER = "integer"
    BOOLEAN = "boolean"
    STRING = "string"
+
+
class IOConstants:
    """Lookup tables and validation rules for component input/output handling."""

    # Maps a primitive type name to the corresponding Python type.
    PRIMITIVE_STR_2_TYPE = {
        ComponentParameterTypes.INTEGER: int,
        ComponentParameterTypes.STRING: str,
        ComponentParameterTypes.NUMBER: float,
        ComponentParameterTypes.BOOLEAN: bool,
    }
    # Inverse of PRIMITIVE_STR_2_TYPE: Python type -> primitive type name.
    PRIMITIVE_TYPE_2_STR = {
        int: ComponentParameterTypes.INTEGER,
        str: ComponentParameterTypes.STRING,
        float: ComponentParameterTypes.NUMBER,
        bool: ComponentParameterTypes.BOOLEAN,
    }
    # Maps a YAML type name to its REST API (capitalized) counterpart.
    TYPE_MAPPING_YAML_2_REST = {
        ComponentParameterTypes.STRING: "String",
        ComponentParameterTypes.INTEGER: "Integer",
        ComponentParameterTypes.NUMBER: "Number",
        ComponentParameterTypes.BOOLEAN: "Boolean",
    }
    # Parsers converting a raw (usually string) value to the declared primitive type.
    # Note the boolean parser treats only the case-insensitive string "true" as True.
    PARAM_PARSERS: Dict = {
        ComponentParameterTypes.INTEGER: lambda v: int(float(v)),  # parse case like 10.0 -> 10
        ComponentParameterTypes.BOOLEAN: lambda v: str(v).lower() == "true",
        ComponentParameterTypes.NUMBER: float,
    }
    # For validation, indicates specific parameters combination for each type
    INPUT_TYPE_COMBINATION = {
        "uri_folder": ["path", "mode"],
        "uri_file": ["path", "mode"],
        "mltable": ["path", "mode"],
        "mlflow_model": ["path", "mode"],
        "custom_model": ["path", "mode"],
        "integer": ["default", "min", "max"],
        "number": ["default", "min", "max"],
        "string": ["default"],
        "boolean": ["default"],
    }
    # Attribute name used to mark a class as a DSL parameter group.
    GROUP_ATTR_NAME = "__dsl_group__"
    GROUP_TYPE_NAME = "group"
    # Note: ([a-zA-Z_]+[a-zA-Z0-9_]*) is a valid single key,
    # so a valid pipeline key is: ^{single_key}([.]{single_key})*$
    VALID_KEY_PATTERN = r"^([a-zA-Z_]+[a-zA-Z0-9_]*)([.]([a-zA-Z_]+[a-zA-Z0-9_]*))*$"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_compute.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_compute.py
new file mode 100644
index 00000000..963e13dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_compute.py
@@ -0,0 +1,137 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class ComputeType:
    """String identifiers for the kinds of compute usable for training.

    Used both when referencing an existing compute target and when creating a
    new one. Valid values are "managed", "amlcompute", "computeinstance",
    "virtualmachine", "kubernetes", "DataFactory", and "synapsespark".
    """

    MANAGED = "managed"
    """Managed compute."""
    AMLCOMPUTE = "amlcompute"
    """Azure Machine Learning compute cluster."""
    COMPUTEINSTANCE = "computeinstance"
    """Azure Machine Learning compute instance."""
    VIRTUALMACHINE = "virtualmachine"
    """Virtual machine compute."""
    KUBERNETES = "kubernetes"
    """Kubernetes compute."""
    ADF = "DataFactory"
    """Azure Data Factory compute."""
    SYNAPSESPARK = "synapsespark"
    """Synapse Spark compute."""
+
+
class ComputeTier:
    """ComputeTier is an enum-like class that defines the tiers of compute that can be used for training.

    ComputeTier can be used to specify the compute tier for a compute target. It can also be used to specify the compute
    tier for a compute target that is being created. Valid values are "low_priority", "dedicated".
    """

    LOWPRIORITY = "low_priority"
    """LOWPRIORITY is a compute tier that is used for low priority compute targets."""
    DEDICATED = "dedicated"
    """DEDICATED is a compute tier that is used for dedicated compute targets."""
+
+
class IdentityType:
    """IdentityType is an enum-like class that defines the types of identity that can be used for compute.

    IdentityType can be used to specify the identity type for a compute target. It can also be used to specify the
    identity type for a compute target that is being created. Valid values are "system_assigned", "user_assigned",
    "both".
    """

    SYSTEM_ASSIGNED = "system_assigned"
    """SYSTEM_ASSIGNED is a compute identity type that is used for system assigned compute targets."""
    USER_ASSIGNED = "user_assigned"
    """USER_ASSIGNED is a compute identity type that is used for user assigned compute targets."""
    # "Both" is encoded as a single comma-separated string, not a list.
    BOTH = "system_assigned,user_assigned"
    """BOTH is a compute identity type that is used for both system and user assigned compute targets."""
+
+
class ComputeDefaults:
    """Default values applied when creating compute resources."""

    VMSIZE = "STANDARD_DS3_V2"
    """Default virtual machine size (STANDARD_DS3_V2)."""
    ADMIN_USER = "azureuser"
    """Default administrator user name (azureuser)."""
    MIN_NODES = 0
    """Default minimum number of nodes (0)."""
    MAX_NODES = 4
    """Default maximum number of nodes (4)."""
    IDLE_TIME = 1800
    """Default idle time before scale-down, in seconds (1800)."""
    PRIORITY = "Dedicated"
    """Default priority tier (Dedicated)."""
+
+
class CustomApplicationDefaults:
    """Default field names and port limits for custom applications on a Compute Instance."""

    TARGET_PORT = "target_port"
    """CustomApplicationDefaults.TARGET_PORT: Indicates target port of the custom application on the Compute
    Instance. (target_port)
    """

    PUBLISHED_PORT = "published_port"
    """CustomApplicationDefaults.PUBLISHED_PORT: Indicates published port of the custom application on the Compute
    Instance. (published_port)
    """

    # Ports at or below 1024 are reserved; custom applications must use 1025-65535.
    PORT_MIN_VALUE = 1025
    """CustomApplicationDefaults.PORT_MIN_VALUE: Indicates minimum port value of the custom application on the
    Compute Instance. (1025)
    """

    PORT_MAX_VALUE = 65535
    """CustomApplicationDefaults.PORT_MAX_VALUE: Indicates maximum port value of the custom application on the
    Compute Instance. (65535)
    """

    DOCKER = "docker"
    """CustomApplicationDefaults.DOCKER: Indicates type of a docker custom application on the Compute Instance. (docker)
    """

    ENDPOINT_NAME = "connect"
    """CustomApplicationDefaults.ENDPOINT_NAME: Indicates endpoint name of the custom application on the Compute
    Instance. (connect)
    """
+
+
class ComputeSizeTier:
    """Class defining Compute size tiers."""

    AML_COMPUTE_DEDICATED = "amlComputeDedicatedVMSize"
    """ComputeSizeTier.AML_COMPUTE_DEDICATED: Indicates Compute Sizes should match Dedicated-tier Virtual Machines.
    (amlComputeDedicatedVMSize)
    """

    AML_COMPUTE_LOWPRIORITY = "amlComputeLowPriorityVMSize"
    """ComputeSizeTier.AML_COMPUTE_LOWPRIORITY: Indicates Compute Sizes should match Low Priority-tier Virtual
    Machines. (amlComputeLowPriorityVMSize)
    """

    COMPUTE_INSTANCE = "computeInstanceVMSize"
    """ComputeSizeTier.COMPUTE_INSTANCE: Indicates Compute Sizes should match Compute Instance Virtual Machines.
    (computeInstanceVMSize)
    """
+
+
# Error-message templates for custom-application validation; filled with str.format().
DUPLICATE_APPLICATION_ERROR = "Value of {} must be unique across all custom applications."
INVALID_VALUE_ERROR = "Value of {} must be between {} and {}."
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_deployment.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_deployment.py
new file mode 100644
index 00000000..c1745e09
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_deployment.py
@@ -0,0 +1,29 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class DeploymentType(object):
    """Kinds of online deployment targets."""

    K8S = "Kubernetes"
    MANAGED = "Managed"
+
+
class BatchDeploymentOutputAction:
    """How a batch deployment organizes its output: append every row, or emit a summary only."""

    APPEND_ROW = "append_row"
    SUMMARY_ONLY = "summary_only"
+
+
class BatchDeploymentType:
    """Kinds of batch deployment: a model deployment or a pipeline-component deployment."""

    MODEL = "model"
    PIPELINE = "pipeline"
+
+
class EndpointDeploymentLogContainerType(object):
    """Container names for deployment logs, in both REST (PascalCase) and local (kebab-case) forms."""

    STORAGE_INITIALIZER_REST = "StorageInitializer"
    INFERENCE_SERVER_REST = "InferenceServer"
    INFERENCE_SERVER = "inference-server"
    STORAGE_INITIALIZER = "storage-initializer"
+
+
# VM SKUs considered "small" for deployment validation purposes (lower-cased names).
SmallSKUs = ["standard_ds1_v2", "standard_ds2_v2"]
# Default datastore path used by the model data collector (MDC).
DEFAULT_MDC_PATH = "azureml://datastores/workspaceblobstore/paths/modelDataCollector"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_endpoint.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_endpoint.py
new file mode 100644
index 00000000..4f9525b8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_endpoint.py
@@ -0,0 +1,93 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class EndpointKeyType(object):
    """Names of the two authentication keys an endpoint exposes."""

    PRIMARY_KEY_TYPE = "primary"
    SECONDARY_KEY_TYPE = "secondary"
+
+
class EndpointInvokeFields(object):
    """HTTP header names and defaults used when invoking an endpoint."""

    # NOTE: class-level dict is shared across all users -- copy before mutating.
    DEFAULT_HEADER = {"Content-Type": "application/json"}
    AUTHORIZATION = "Authorization"
    MODEL_DEPLOYMENT = "azureml-model-deployment"
    # NOTE(review): value mixes '_' and '-'; looks like it was meant to be the
    # "Repeatability-Request-ID" header -- confirm against the service contract
    # before changing, since this string is sent on the wire.
    REPEATABILITY_REQUEST_ID = "repeatability_request-id"
+
+
class EndpointGetLogsFields(object):
    """Defaults for retrieving endpoint logs."""

    # Default number of log lines to fetch.
    LINES = 5000
+
+
class EndpointYamlFields(object):
    """YAML field names (and a few defaults) used in endpoint and deployment specs."""

    TYPE = "type"
    TRAFFIC_NAME = "traffic"
    NAME = "name"
    # Scale settings fields.
    SCALE_SETTINGS = "scale_settings"
    SCALE_TYPE = "scale_type"
    INSTANCE_COUNT = "instance_count"
    MINIMUM = "min_instances"
    MAXIMUM = "max_instances"
    POLLING_INTERVAL = "polling_interval"
    TARGET_UTILIZATION_PERCENTAGE = "target_utilization_percentage"
    # Default SKU value, not a field name.
    SKU_DEFAULT = "Standard_F4s_v2"
    COMPUTE = "compute"
    # Code configuration fields.
    CODE_CONFIGURATION = "code_configuration"
    CODE = "code"
    SCORING_SCRIPT = "scoring_script"
    SCORING_URI = "scoring_uri"
    OPENAPI_URI = "openapi_uri"
    PROVISIONING_STATE = "provisioning_state"
    MINI_BATCH_SIZE = "mini_batch_size"
    RETRY_SETTINGS = "retry_settings"
    # Batch-job fields (dotted names address nested YAML keys).
    BATCH_JOB_INPUT_DATA = "input_data"
    BATCH_JOB_INSTANCE_COUNT = "compute.instance_count"
    BATCH_JOB_OUTPUT_DATA = "output_data"
    BATCH_JOB_OUTPUT_PATH = "output_dataset.path"
    BATCH_JOB_OUTPUT_DATSTORE = "output_dataset.datastore_id"
    BATCH_JOB_NAME = "job_name"
    BATCH_JOB_EXPERIMENT_NAME = "experiment_name"
    BATCH_JOB_PROPERTIES = "properties"
+
+
class EndpointConfigurations:
    """Validation limits for endpoint names."""

    MIN_NAME_LENGTH = 3
    MAX_NAME_LENGTH = 32
    # Must start with a letter, end with a letter or digit, and may contain hyphens in between.
    NAME_REGEX_PATTERN = r"^[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?$"
+
+
class LocalEndpointConstants:
    """Constants used when running an endpoint locally in a Docker container."""

    CONDA_FILE_NAME = "conda.yml"
    DOCKER_PORT = "5001"
    # Docker labels used to tag and identify local-endpoint containers.
    LABEL_KEY_AZUREML_LOCAL_ENDPOINT = "azureml-local-endpoint"
    LABEL_KEY_ENDPOINT_NAME = "endpoint"
    LABEL_KEY_DEPLOYMENT_NAME = "deployment"
    LABEL_KEY_ENDPOINT_JSON = "endpoint-data"
    LABEL_KEY_DEPLOYMENT_JSON = "deployment-data"
    LABEL_KEY_AZUREML_PORT = "azureml-port"
    # Seconds to wait for the container to come up before checking state.
    DEFAULT_STARTUP_WAIT_TIME_SECONDS = 15
    CONTAINER_EXITED = "exited"
    # Provisioning states reported for the local endpoint.
    ENDPOINT_STATE_FAILED = "Failed"
    ENDPOINT_STATE_SUCCEEDED = "Succeeded"
    ENDPOINT_STATE_LOCATION = "local"
    # Paths and environment variables inside the inference container.
    AZUREML_APP_PATH = "/var/azureml-app/"
    ENVVAR_KEY_AZUREML_ENTRY_SCRIPT = "AZUREML_ENTRY_SCRIPT"
    ENVVAR_KEY_AZUREML_MODEL_DIR = "AZUREML_MODEL_DIR"
    ENVVAR_KEY_AML_APP_ROOT = "AML_APP_ROOT"
    ENVVAR_KEY_AZUREML_INFERENCE_PYTHON_PATH = "AZUREML_INFERENCE_PYTHON_PATH"
    CONDA_ENV_NAME = "inf-conda-env"
    CONDA_ENV_BIN_PATH = "/opt/miniconda/envs/inf-conda-env/bin"
    CONDA_ENV_PYTHON_PATH = "/opt/miniconda/envs/inf-conda-env/bin/python"
+
+
class BatchEndpointInvoke:
    """Field names used when invoking a batch endpoint."""

    INPUTS = "inputs"
    OUTPUTS = "outputs"
    ENDPOINT = "endpoint"
    DEPLOYMENT = "deployment"
    TYPE = "type"
    MODE = "mode"
    PATH = "path"
    DEFAULT = "default"
    MIN = "min"
    MAX = "max"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_finetuning.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_finetuning.py
new file mode 100644
index 00000000..2aecab91
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_finetuning.py
@@ -0,0 +1,17 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class FineTuningTaskType:
    """Task-type identifiers for fine-tuning jobs."""

    # NOTE(review): these values duplicate FineTuningTaskTypes in
    # constants/_job/finetuning.py -- keep the two lists in sync.
    CHAT_COMPLETION = "ChatCompletion"
    TEXT_COMPLETION = "TextCompletion"
    TEXT_CLASSIFICATION = "TextClassification"
    QUESTION_ANSWERING = "QuestionAnswering"
    TEXT_SUMMARIZATION = "TextSummarization"
    TOKEN_CLASSIFICATION = "TokenClassification"
    TEXT_TRANSLATION = "TextTranslation"
    IMAGE_CLASSIFICATION = "ImageClassification"
    IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation"
    IMAGE_OBJECT_DETECTION = "ImageObjectDetection"
    VIDEO_MULTI_OBJECT_TRACKING = "VideoMultiObjectTracking"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/__init__.py
new file mode 100644
index 00000000..0d89198a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/__init__.py
@@ -0,0 +1,38 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
+
+from .automl import (
+ AutoMLConstants,
+ AutoMLTransformerParameterKeys,
+ ImageClassificationModelNames,
+ ImageInstanceSegmentationModelNames,
+ ImageObjectDetectionModelNames,
+ NlpLearningRateScheduler,
+ NlpModels,
+ TabularTrainingMode,
+)
+from .distillation import DataGenerationTaskType, DataGenerationType
+from .job import DistributionType, ImportSourceType, JobType
+from .pipeline import PipelineConstants
+from .sweep import SearchSpace
+
# Public re-exports of job-related constants; keep in sync with the imports above.
__all__ = [
    "AutoMLConstants",
    "AutoMLTransformerParameterKeys",
    "DataGenerationTaskType",
    "DataGenerationType",
    "DistributionType",
    "ImageClassificationModelNames",
    "ImageObjectDetectionModelNames",
    "ImageInstanceSegmentationModelNames",
    "JobType",
    "ImportSourceType",
    "PipelineConstants",
    "SearchSpace",
    "NlpModels",
    "NlpLearningRateScheduler",
    "TabularTrainingMode",
]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/automl.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/automl.py
new file mode 100644
index 00000000..41af0781
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/automl.py
@@ -0,0 +1,116 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from enum import Enum
+
+# pylint: disable=unused-import
+from azure.ai.ml._restclient.v2023_04_01_preview.models import NlpLearningRateScheduler, TrainingMode
+from azure.ai.ml._utils._experimental import experimental
+
+
class AutoMLConstants:
    """YAML field names and general-purpose values used by AutoML jobs."""

    # The following are fields found in the yaml for AutoML Job
    GENERAL_YAML = "general"
    DATA_YAML = "data"
    FEATURIZATION_YAML = "featurization"
    LIMITS_YAML = "limits"
    SWEEP_YAML = "sweep"
    # Also serves as the "forecasting" task-type value (previously this
    # attribute was assigned twice with the same value; the duplicate under
    # TASK TYPES has been removed).
    FORECASTING_YAML = "forecasting"
    TRAINING_YAML = "training"
    MAX_TRIALS_YAML = "max_trials"
    VALIDATION_DATASET_SIZE_YAML = "validation_dataset_size"
    TRAINING_DATA_SETTINGS_YAML = "training"
    TEST_DATA_SETTINGS_YAML = "test"
    VALIDATION_DATA_SETTINGS_YAML = "validation"
    COUNTRY_OR_REGION_YAML = "country_or_region_for_holidays"
    TASK_TYPE_YAML = "task"
    TIMEOUT_YAML = "timeout_minutes"
    TRIAL_TIMEOUT_YAML = "trial_timeout_minutes"
    BLOCKED_ALGORITHMS_YAML = "blocked_training_algorithms"
    ALLOWED_ALGORITHMS_YAML = "allowed_training_algorithms"
    ENSEMBLE_MODEL_DOWNLOAD_TIMEOUT_YAML = "ensemble_model_download_timeout_minutes"
    TERMINATION_POLICY_TYPE_YAML = "type"

    # TASK TYPES (FORECASTING_YAML above is the third task-type value)
    CLASSIFICATION_YAML = "classification"
    REGRESSION_YAML = "regression"

    # The following are general purpose AutoML fields
    TARGET_LAGS = "target_lags"
    AUTO = "auto"
    OFF = "off"
    CUSTOM = "custom"
    TIME_SERIES_ID_COLUMN_NAMES = "time_series_id_column_names"
    TRANSFORMER_PARAMS = "transformer_params"
    MODE = "mode"
+
+
class AutoMLTransformerParameterKeys(Enum):
    """Featurization transformer names accepted in transformer_params."""

    IMPUTER = "Imputer"
    TF_IDF = "TfIdf"
    HASH_ONE_HOT_ENCODER = "HashOneHotEncoder"
+
+
class ImageClassificationModelNames(Enum):
    """Model names that are supported for Image Classification tasks."""

    MOBILENETV2 = "mobilenetv2"
    RESNET18 = "resnet18"
    RESNET34 = "resnet34"
    RESNET50 = "resnet50"
    RESNET101 = "resnet101"
    RESNET152 = "resnet152"
    RESNEST50 = "resnest50"
    RESNEST101 = "resnest101"
    SERESNEXT = "seresnext"
    # Vision Transformer variants: small/base/large, 16-pixel patches, 224px resolution.
    VITS16R224 = "vits16r224"
    VITB16R224 = "vitb16r224"
    VITL16R224 = "vitl16r224"
+
+
class ImageObjectDetectionModelNames(Enum):
    """Model names that are supported for Image Object Detection tasks."""

    YOLOV5 = "yolov5"
    FASTERRCNN_RESNET18_FPN = "fasterrcnn_resnet18_fpn"
    FASTERRCNN_RESNET34_FPN = "fasterrcnn_resnet34_fpn"
    FASTERRCNN_RESNET50_FPN = "fasterrcnn_resnet50_fpn"
    FASTERRCNN_RESNET101_FPN = "fasterrcnn_resnet101_fpn"
    FASTERRCNN_RESNET152_FPN = "fasterrcnn_resnet152_fpn"
    RETINANET_RESNET50_FPN = "retinanet_resnet50_fpn"
+
+
class ImageInstanceSegmentationModelNames(Enum):
    """Model names that are supported for Image Instance Segmentation tasks."""

    MASKRCNN_RESNET18_FPN = "maskrcnn_resnet18_fpn"
    MASKRCNN_RESNET34_FPN = "maskrcnn_resnet34_fpn"
    MASKRCNN_RESNET50_FPN = "maskrcnn_resnet50_fpn"
    MASKRCNN_RESNET101_FPN = "maskrcnn_resnet101_fpn"
    MASKRCNN_RESNET152_FPN = "maskrcnn_resnet152_fpn"
+
+
class NlpModels(Enum):
    """Model names that are supported for NLP (Natural Language Processing) tasks."""

    BERT_BASE_CASED = "bert-base-cased"
    BERT_BASE_UNCASED = "bert-base-uncased"
    BERT_BASE_MULTILINGUAL_CASED = "bert-base-multilingual-cased"
    BERT_BASE_GERMAN_CASED = "bert-base-german-cased"
    BERT_LARGE_CASED = "bert-large-cased"
    BERT_LARGE_UNCASED = "bert-large-uncased"
    DISTILBERT_BASE_CASED = "distilbert-base-cased"
    DISTILBERT_BASE_UNCASED = "distilbert-base-uncased"
    ROBERTA_BASE = "roberta-base"
    ROBERTA_LARGE = "roberta-large"
    DISTILROBERTA_BASE = "distilroberta-base"
    XLM_ROBERTA_BASE = "xlm-roberta-base"
    XLM_ROBERTA_LARGE = "xlm-roberta-large"
    XLNET_BASE_CASED = "xlnet-base-cased"
    XLNET_LARGE_CASED = "xlnet-large-cased"
+
+
# Patch the REST-client enum's docstring, then republish it under the public
# name TabularTrainingMode, marked experimental.
TrainingMode.__doc__ = "Mode to enable/disable distributed training."
TabularTrainingMode = experimental(TrainingMode)
TabularTrainingMode.__name__ = "TabularTrainingMode"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/distillation.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/distillation.py
new file mode 100644
index 00000000..4a6cf981
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/distillation.py
@@ -0,0 +1,16 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class DataGenerationTaskType:
    """String constants naming the task types supported for distillation data generation."""

    NLI = "NLI"
    NLU_QA = "NLU_QA"
    CONVERSATION = "CONVERSATION"
    MATH = "MATH"
    SUMMARIZATION = "SUMMARIZATION"
+
+
class DataGenerationType:
    """String constants for the kind of data-generation step in a distillation job."""

    LABEL_GENERATION = "label_generation"
    DATA_GENERATION = "data_generation"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/finetuning.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/finetuning.py
new file mode 100644
index 00000000..8d763b63
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/finetuning.py
@@ -0,0 +1,26 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class FineTuningConstants:
    """Assorted string constants used by fine-tuning job schemas.

    Note the attribute spelling is PascalCase while the values are
    lowercase/snake_case strings.
    """

    # Presumably values for the model-provider field below — confirm at usage sites.
    AzureOpenAI = "azure-openai"
    Custom = "custom"

    # Schema/field-name keys.
    TaskType = "task"
    ModelProvider = "model_provider"
    HyperParameters = "hyperparameters"
+
+
class FineTuningTaskTypes:
    """Task type names accepted for fine-tuning jobs (PascalCase spellings)."""

    CHAT_COMPLETION = "ChatCompletion"
    TEXT_COMPLETION = "TextCompletion"
    TEXT_CLASSIFICATION = "TextClassification"
    QUESTION_ANSWERING = "QuestionAnswering"
    TEXT_SUMMARIZATION = "TextSummarization"
    TOKEN_CLASSIFICATION = "TokenClassification"
    TEXT_TRANSLATION = "TextTranslation"
    IMAGE_CLASSIFICATION = "ImageClassification"
    IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation"
    IMAGE_OBJECT_DETECTION = "ImageObjectDetection"
    VIDEO_MULTI_OBJECT_TRACKING = "VideoMultiObjectTracking"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/job.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/job.py
new file mode 100644
index 00000000..4037dbb3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/job.py
@@ -0,0 +1,165 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class DistributionType:
    """Identifiers for the distributed-training frameworks a job can declare."""

    MPI = "mpi"
    TENSORFLOW = "tensorflow"
    PYTORCH = "pytorch"
    RAY = "ray"
+
+
class JobType(object):
    """Job-type discriminator values as they appear in job YAML/REST payloads."""

    COMMAND = "command"
    SWEEP = "sweep"
    PIPELINE = "pipeline"
    AUTOML = "automl"
    COMPONENT = "component"
    BASE = "base"
    PARALLEL = "parallel"
    IMPORT = "import"
    SPARK = "spark"
    DATA_TRANSFER = "data_transfer"
    # NOTE: value has no underscore, unlike DATA_TRANSFER above.
    FINE_TUNING = "finetuning"
    DISTILLATION = "distillation"
+
+
class JobLimitsType(object):
    """Discriminator values for job-limits objects (currently only sweep limits)."""

    SWEEP = "Sweep"
+
+
class JobLogPattern:
    """Regex strings matching the primary log-file paths for each job flavor."""

    # Two digits after "azureml-logs/", then any suffix, ending in ".txt".
    COMMAND_JOB_LOG_PATTERN = "azureml-logs/[\\d]{2}.+\\.txt"
    PIPELINE_JOB_LOG_PATTERN = "logs/azureml/executionlogs\\.txt"
    SWEEP_JOB_LOG_PATTERN = "azureml-logs/hyperdrive\\.txt"
    # Common-runtime stdout stream, e.g. "std_log.txt" / "std_log_0.txt" / "std_log_ps.txt".
    COMMON_RUNTIME_STREAM_LOG_PATTERN = "user_logs/std_log[\\D]*[0]*(?:_ps)?\\.txt"
    # Every common-runtime user log file.
    COMMON_RUNTIME_ALL_USER_LOG_PATTERN = "user_logs/std_log.*\\.txt"
+
+
class JobServices:
    """Well-known job service names."""

    STUDIO = "Studio"
+
+
class ImportSourceType:
    """Source system types supported by import jobs."""

    AZURESQLDB = "azuresqldb"
    AZURESYNAPSEANALYTICS = "azuresynapseanalytics"
    SNOWFLAKE = "snowflake"
    S3 = "s3"
+
+
class JobComputePropertyFields(object):
    """Property-bag field names recognized on a job's compute section."""

    # Legacy name for what is now called Singularity.
    AISUPERCOMPUTER = "AISuperComputer"
    SINGULARITY = "Singularity"
    ITP = "itp"
    TARGET_SELECTOR = "target_selector"
+
+
class SparkConfKey:
    """Snake_case Spark resource-configuration keys used on SDK entities.

    Each key corresponds 1:1 to a native ``spark.*`` key in RestSparkConfKey.
    """

    DRIVER_CORES = "driver_cores"
    DRIVER_MEMORY = "driver_memory"
    EXECUTOR_CORES = "executor_cores"
    EXECUTOR_MEMORY = "executor_memory"
    EXECUTOR_INSTANCES = "executor_instances"
    DYNAMIC_ALLOCATION_MIN_EXECUTORS = "dynamic_allocation_min_executors"
    DYNAMIC_ALLOCATION_MAX_EXECUTORS = "dynamic_allocation_max_executors"
    DYNAMIC_ALLOCATION_ENABLED = "dynamic_allocation_enabled"
+
+
class RestSparkConfKey:
    """Native ``spark.*`` configuration keys, corresponding 1:1 to SparkConfKey."""

    DRIVER_CORES = "spark.driver.cores"
    DRIVER_MEMORY = "spark.driver.memory"
    EXECUTOR_CORES = "spark.executor.cores"
    EXECUTOR_MEMORY = "spark.executor.memory"
    EXECUTOR_INSTANCES = "spark.executor.instances"
    DYNAMIC_ALLOCATION_MIN_EXECUTORS = "spark.dynamicAllocation.minExecutors"
    DYNAMIC_ALLOCATION_MAX_EXECUTORS = "spark.dynamicAllocation.maxExecutors"
    DYNAMIC_ALLOCATION_ENABLED = "spark.dynamicAllocation.enabled"
+
+
class JobServiceTypeNames:
    """Job-service type names in entity (SDK) and REST spellings, plus translation maps.

    ``ENTITY_TO_REST`` / ``REST_TO_ENTITY`` convert between the two spellings;
    ``NAMES_ALLOWED_FOR_PUBLIC`` lists the service types users may declare directly.
    """

    class EntityNames:
        # SDK-facing spellings (mostly snake_case; Tracking/Studio/RayDashboard kept as-is).
        CUSTOM = "custom"
        TRACKING = "Tracking"
        STUDIO = "Studio"
        JUPYTER_LAB = "jupyter_lab"
        SSH = "ssh"
        TENSOR_BOARD = "tensor_board"
        VS_CODE = "vs_code"
        RAY_DASHBOARD = "RayDashboard"

    class RestNames:
        # REST API spellings (PascalCase).
        CUSTOM = "Custom"
        TRACKING = "Tracking"
        STUDIO = "Studio"
        JUPYTER_LAB = "JupyterLab"
        SSH = "SSH"
        TENSOR_BOARD = "TensorBoard"
        VS_CODE = "VSCode"
        RAY_DASHBOARD = "RayDashboard"

    # Forward map built from explicit pairs; the reverse map is its inversion.
    ENTITY_TO_REST = dict(
        [
            (EntityNames.CUSTOM, RestNames.CUSTOM),
            (EntityNames.TRACKING, RestNames.TRACKING),
            (EntityNames.STUDIO, RestNames.STUDIO),
            (EntityNames.JUPYTER_LAB, RestNames.JUPYTER_LAB),
            (EntityNames.SSH, RestNames.SSH),
            (EntityNames.TENSOR_BOARD, RestNames.TENSOR_BOARD),
            (EntityNames.VS_CODE, RestNames.VS_CODE),
            (EntityNames.RAY_DASHBOARD, RestNames.RAY_DASHBOARD),
        ]
    )

    REST_TO_ENTITY = dict(zip(ENTITY_TO_REST.values(), ENTITY_TO_REST.keys()))

    NAMES_ALLOWED_FOR_PUBLIC = [
        EntityNames.JUPYTER_LAB,
        EntityNames.SSH,
        EntityNames.TENSOR_BOARD,
        EntityNames.VS_CODE,
    ]
+
+
class JobTierNames:
    """Job tier names in entity (lowercase) and REST (capitalized) spellings, plus maps between them."""

    class EntityNames:
        Spot = "spot"
        Basic = "basic"
        Standard = "standard"
        Premium = "premium"
        Null = "null"

    class RestNames:
        Null = "Null"
        Spot = "Spot"
        Basic = "Basic"
        Standard = "Standard"
        Premium = "Premium"

    # entity spelling -> REST spelling; each REST name is the entity name capitalized.
    ENTITY_TO_REST = dict(
        [
            (EntityNames.Null, RestNames.Null),
            (EntityNames.Spot, RestNames.Spot),
            (EntityNames.Basic, RestNames.Basic),
            (EntityNames.Standard, RestNames.Standard),
            (EntityNames.Premium, RestNames.Premium),
        ]
    )

    REST_TO_ENTITY = dict(zip(ENTITY_TO_REST.values(), ENTITY_TO_REST.keys()))

    ALLOWED_NAMES = [
        EntityNames.Spot,
        EntityNames.Basic,
        EntityNames.Standard,
        EntityNames.Premium,
        EntityNames.Null,
    ]
+
+
class JobPriorityValues:
    """Job priority: entity-side string labels and their REST-side integer codes."""

    class EntityValues:
        LOW = "low"
        MEDIUM = "medium"
        HIGH = "high"

    class RestValues:
        # REST encodes priority as an integer; a larger number means higher priority.
        LOW = 1
        MEDIUM = 2
        HIGH = 3

    # label -> integer code.
    ENTITY_TO_REST = dict(
        [
            (EntityValues.LOW, RestValues.LOW),
            (EntityValues.MEDIUM, RestValues.MEDIUM),
            (EntityValues.HIGH, RestValues.HIGH),
        ]
    )

    # integer code -> label, the inversion of ENTITY_TO_REST.
    REST_TO_ENTITY = dict(zip(ENTITY_TO_REST.values(), ENTITY_TO_REST.keys()))

    ALLOWED_VALUES = [EntityValues.LOW, EntityValues.MEDIUM, EntityValues.HIGH]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/pipeline.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/pipeline.py
new file mode 100644
index 00000000..4b66d401
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/pipeline.py
@@ -0,0 +1,64 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class PipelineConstants:
    """Field names and run-property keys used when (de)serializing pipeline jobs."""

    # Default-datastore field in SDK vs REST spelling.
    DEFAULT_DATASTORE_SDK = "default_datastore_name"
    DEFAULT_DATASTORE_REST = "defaultDatastoreName"
    DEFAULT_DATASTORE = "default_datastore"
    DEFAULT_COMPUTE = "default_compute"
    CONTINUE_ON_STEP_FAILURE = "continue_on_step_failure"
    CONTINUE_RUN_ON_FAILED_OPTIONAL_INPUT = "continue_run_on_failed_optional_input"
    DATASTORE_REST = "Datastore"
    ENVIRONMENT = "environment"
    CODE = "code"
    # Run-history property keys ("azureml.*") — presumably set by the backend
    # on reused pipeline child runs; confirm against run-history payloads.
    REUSED_FLAG_FIELD = "azureml.isreused"
    REUSED_FLAG_TRUE = "true"
    REUSED_JOB_ID = "azureml.reusedrunid"
    PIPELINE_JOB_TYPE = "azureml.pipelinejob"
+
+
class ValidationErrorCode:
    """Error-code strings attached to validation failures."""

    PARAMETER_TYPE_UNKNOWN = "ParameterTypeUnknown"
+
+
# Methods in Python dictionary, when used as IO name, will actually get function rather than IO object,
# resulting in validation error.
# So print warning message on this and suggest user to access with syntax "d[key]" instead of "d.key".
# Reference: builtins.py::dict
# Split into the public dict API and the dunder protocol for readability; the
# union below is the single reserved-name set used for validation.
_DICT_PUBLIC_METHODS = {
    "clear", "copy", "fromkeys", "get", "items", "keys",
    "pop", "popitem", "setdefault", "update", "values",
}
_DICT_DUNDER_METHODS = {
    "__class_getitem__", "__contains__", "__delitem__", "__eq__",
    "__getattribute__", "__getitem__", "__ge__", "__init__", "__ior__",
    "__iter__", "__len__", "__le__", "__lt__", "__new__", "__ne__",
    "__or__", "__repr__", "__reversed__", "__ror__", "__setitem__",
    "__sizeof__", "__hash__",
}
COMPONENT_IO_KEYWORDS = _DICT_PUBLIC_METHODS | _DICT_DUNDER_METHODS
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/sweep.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/sweep.py
new file mode 100644
index 00000000..4dbbe80c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_job/sweep.py
@@ -0,0 +1,22 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
class SearchSpace:
    """Names of the hyperparameter search-space distributions, plus groupings of related ones."""

    # Hyperparameter search constants
    CHOICE = "choice"
    UNIFORM = "uniform"
    LOGUNIFORM = "loguniform"
    QUNIFORM = "quniform"
    QLOGUNIFORM = "qloguniform"
    NORMAL = "normal"
    LOGNORMAL = "lognormal"
    QNORMAL = "qnormal"
    QLOGNORMAL = "qlognormal"
    RANDINT = "randint"

    # Related distributions grouped for convenience (distributions in each
    # group take the same argument shape).
    UNIFORM_LOGUNIFORM = [UNIFORM, LOGUNIFORM]
    QUNIFORM_QLOGUNIFORM = [QUNIFORM, QLOGUNIFORM]
    NORMAL_LOGNORMAL = [NORMAL, LOGNORMAL]
    QNORMAL_QLOGNORMAL = [QNORMAL, QLOGNORMAL]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_monitoring.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_monitoring.py
new file mode 100644
index 00000000..224102ca
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_monitoring.py
@@ -0,0 +1,123 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from enum import Enum
+
+from azure.core import CaseInsensitiveEnumMeta
+
+from azure.ai.ml._utils._experimental import experimental
+
+
# Presumably a sentinel meaning "monitor every feature" rather than an
# explicit feature list — confirm at signal-construction call sites.
ALL_FEATURES = "all_features"


AZMONITORING = "azmonitoring"

# Dotted-path keys into an online deployment's data_collector property bag,
# addressing the model-inputs / model-outputs collection assets and flags.
DEPLOYMENT_MODEL_INPUTS_NAME_KEY = "data_collector.collections.model_inputs.data.name"
DEPLOYMENT_MODEL_INPUTS_VERSION_KEY = "data_collector.collections.model_inputs.data.version"
DEPLOYMENT_MODEL_OUTPUTS_NAME_KEY = "data_collector.collections.model_outputs.data.name"
DEPLOYMENT_MODEL_OUTPUTS_VERSION_KEY = "data_collector.collections.model_outputs.data.version"
DEPLOYMENT_MODEL_INPUTS_COLLECTION_KEY = "data_collector.collections.model_inputs.enabled"
DEPLOYMENT_MODEL_OUTPUTS_COLLECTION_KEY = "data_collector.collections.model_outputs.enabled"


# Dotted-path keys for the Spark compute resources of a monitoring job.
SPARK_INSTANCE_TYPE_KEY = "compute.spark.resources.instance_type"
SPARK_RUNTIME_VERSION = "compute.spark.resources.runtime_version"

# Identity type names for monitoring compute.
COMPUTE_AML_TYPE = "AmlToken"
COMPUTE_MANAGED_IDENTITY_TYPE = "ManagedIdentity"

# Default names for the built-in monitoring signals.
DEFAULT_DATA_DRIFT_SIGNAL_NAME = "data-drift-signal"
DEFAULT_PREDICTION_DRIFT_SIGNAL_NAME = "prediction-drift-signal"
DEFAULT_DATA_QUALITY_SIGNAL_NAME = "data-quality-signal"
DEFAULT_TOKEN_USAGE_SIGNAL_NAME = "token-usage-signal"
+
+
@experimental
class MonitorSignalType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Types of monitoring signal that can be configured on a monitor."""

    DATA_DRIFT = "data_drift"
    DATA_QUALITY = "data_quality"
    PREDICTION_DRIFT = "prediction_drift"
    MODEL_PERFORMANCE = "model_performance"
    FEATURE_ATTRIBUTION_DRIFT = "feature_attribution_drift"
    CUSTOM = "custom"
    GENERATION_SAFETY_QUALITY = "generation_safety_quality"
    GENERATION_TOKEN_STATISTICS = "generation_token_statistics"
+
+
@experimental
class MonitorMetricName(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Metric names usable in monitoring-signal thresholds."""

    JENSEN_SHANNON_DISTANCE = "jensen_shannon_distance"
    NORMALIZED_WASSERSTEIN_DISTANCE = "normalized_wasserstein_distance"
    POPULATION_STABILITY_INDEX = "population_stability_index"
    TWO_SAMPLE_KOLMOGOROV_SMIRNOV_TEST = "two_sample_kolmogorov_smirnov_test"
    PEARSONS_CHI_SQUARED_TEST = "pearsons_chi_squared_test"
    NULL_VALUE_RATE = "null_value_rate"
    DATA_TYPE_ERROR_RATE = "data_type_error_rate"
    # NOTE(review): member name says "BOUND" but the value says "bounds" —
    # the value is the wire contract, so do not "fix" either side casually.
    OUT_OF_BOUND_RATE = "out_of_bounds_rate"
    NORMALIZED_DISCOUNTED_CUMULATIVE_GAIN = "normalized_discounted_cumulative_gain"
    ACCURACY = "accuracy"
    PRECISION = "precision"
    RECALL = "recall"
    F1_SCORE = "f1_score"
    # Regression metrics keep their conventional uppercase abbreviations as values.
    MAE = "MAE"
    MSE = "MSE"
    RMSE = "RMSE"
+
+
@experimental
class MonitorModelType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Task type of the model being monitored."""

    CLASSIFICATION = "classification"
    REGRESSION = "regression"
+
+
@experimental
class MonitorFeatureType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Feature-type categories a monitoring signal can target."""

    NUMERICAL = "numerical"
    CATEGORICAL = "categorical"
    NOT_APPLICABLE = "not_applicable"
    # Selects both numerical and categorical features.
    ALL_FEATURE_TYPES = "all_feature_types"
+
+
@experimental
class MonitorDatasetContext(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Role a dataset plays for a monitoring signal."""

    MODEL_INPUTS = "model_inputs"
    MODEL_OUTPUTS = "model_outputs"
    TRAINING = "training"
    TEST = "test"
    VALIDATION = "validation"
    # NOTE: value is "ground_truth", without the "_data" suffix of the member name.
    GROUND_TRUTH_DATA = "ground_truth"
+
+
class MonitorTargetTasks(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Task types a monitoring target can declare (PascalCase REST spellings)."""

    CLASSIFICATION = "Classification"
    REGRESSION = "Regression"
    QUESTION_ANSWERING = "QuestionAnswering"
+
+
class MonitorInputDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """How a monitoring input dataset's time window is defined."""

    #: Input data with a fixed window size.
    STATIC = "Static"
    #: Input data whose window trails relative to the monitor's current run.
    TRAILING = "Trailing"
    #: Input data in tabular format that requires no preprocessing.
    FIXED = "Fixed"
+
+
class FADColumnNames(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Column names used by the feature-attribution-drift (FAD) signal."""

    PREDICTION = "prediction"
    PREDICTION_PROBABILITY = "prediction_probability"
    CORRELATION_ID = "correlation_id"
+
+
class MonitorFeatureDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Data type of an individual monitored feature."""

    NUMERICAL = "numerical"
    CATEGORICAL = "categorical"
+
+
class NumericalMetricThresholds(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Metric names valid for numerical-feature drift thresholds."""

    JENSEN_SHANNON_DISTANCE = "jensen_shannon_distance"
    NORMALIZED_WASSERSTEIN_DISTANCE = "normalized_wasserstein_distance"
    POPULATION_STABILITY_INDEX = "population_stability_index"
    TWO_SAMPLE_KOLMOGOROV_SMIRNOV_TEST = "two_sample_kolmogorov_smirnov_test"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_registry.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_registry.py
new file mode 100644
index 00000000..23cffeb1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_registry.py
@@ -0,0 +1,40 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import re
+from enum import Enum
+
+from azure.core import CaseInsensitiveEnumMeta
+
+
class StorageAccountType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Storage account types.

    Values are the ARM SKU names lowercased (the original spelling is
    e.g. "Standard_LRS"; the metaclass makes lookups case-insensitive anyway).
    """

    STANDARD_LRS = "standard_lrs"
    STANDARD_GRS = "standard_grs"
    STANDARD_RAGRS = "standard_ragrs"
    STANDARD_ZRS = "standard_zrs"
    STANDARD_GZRS = "standard_gzrs"
    STANDARD_RAGZRS = "standard_ragzrs"
    PREMIUM_LRS = "premium_lrs"
    PREMIUM_ZRS = "premium_zrs"
+
+
# When will other values be allowed?
class AcrAccountSku(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Azure Container Registry SKUs (currently only Premium is supported)."""

    PREMIUM = "Premium".lower()
+
+
# based on /subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/
#   .../providers/Microsoft.Storage/storageAccounts/{StorageAccountName}
# Raw strings with the literal dots escaped: a bare "." would also match e.g.
# "MicrosoftXStorage". Groups: (1) subscription id, (2) resource group,
# (3) storage account name.
STORAGE_ACCOUNT_FORMAT = re.compile(
    r"/subscriptions/(.*)/resourceGroups/(.*)/providers/Microsoft\.Storage/storageAccounts/(.*)"
)
# based on /subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/
#   .../providers/Microsoft.ContainerRegistry/registries/{AcrName}
ACR_ACCOUNT_FORMAT = re.compile(
    r"/subscriptions/(.*)/resourceGroups/(.*)/providers/Microsoft\.ContainerRegistry/registries/(.*)"
)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_workspace.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_workspace.py
new file mode 100644
index 00000000..359e211d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/constants/_workspace.py
@@ -0,0 +1,56 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from enum import Enum
+
+from azure.ai.ml._utils._experimental import experimental
+from azure.core import CaseInsensitiveEnumMeta
+
+
class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed)."""

    NONE = "None"
    SYSTEM_ASSIGNED = "SystemAssigned"
    USER_ASSIGNED = "UserAssigned"
    # Both identity kinds enabled at once; REST encodes this as a comma-joined value.
    SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+
+
class IsolationMode:
    """IsolationMode for the workspace managed network."""

    # Managed network isolation turned off.
    DISABLED = "Disabled"
    # Outbound internet traffic allowed.
    ALLOW_INTERNET_OUTBOUND = "AllowInternetOutbound"
    # Only explicitly approved outbound destinations allowed.
    ALLOW_ONLY_APPROVED_OUTBOUND = "AllowOnlyApprovedOutbound"
+
+
@experimental
class FirewallSku:
    """Firewall Sku for FQDN rules in AllowOnlyApprovedOutbound isolation mode."""

    STANDARD = "Standard"
    BASIC = "Basic"
+
+
class OutboundRuleCategory:
    """Category for a managed network outbound rule.

    USER_DEFINED marks rules created by the user; the other categories are
    presumably assigned by the service — confirm against the REST API docs.
    """

    REQUIRED = "Required"
    RECOMMENDED = "Recommended"
    USER_DEFINED = "UserDefined"
    DEPENDENCY = "Dependency"
+
+
class OutboundRuleType:
    """Type of managed network outbound rule."""

    FQDN = "FQDN"
    PRIVATE_ENDPOINT = "PrivateEndpoint"
    SERVICE_TAG = "ServiceTag"
+
+
+@experimental
+class CapabilityHostKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Capabilityhost kind."""
+
+ AGENTS = "Agents"