Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/__init__.py                  |   5
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_aml_compute_node_info.py    |  50
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_custom_applications.py      | 221
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_image_metadata.py           |  63
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_schedule.py                 | 153
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_setup_scripts.py            |  90
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_usage.py                    | 100
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_vm_size.py                  | 104
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/aml_compute.py               | 281
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/compute.py                   | 261
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/compute_instance.py          | 511
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/kubernetes_compute.py        | 105
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/synapsespark_compute.py      | 234
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/unsupported_compute.py       |  62
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/virtual_machine_compute.py   | 172
15 files changed, 2412 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/__init__.py
new file mode 100644
index 00000000..fdf8caba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/__init__.py
@@ -0,0 +1,5 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_aml_compute_node_info.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_aml_compute_node_info.py
new file mode 100644
index 00000000..823a89ca
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_aml_compute_node_info.py
@@ -0,0 +1,50 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import Dict, Optional
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import AmlComputeNodeInformation
+from azure.ai.ml._schema.compute.aml_compute_node_info import AmlComputeNodeInfoSchema
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY
+
+
+class AmlComputeNodeInfo:
+ """Compute node information related to AmlCompute."""
+
+ def __init__(self) -> None:
+ self.node_id = None
+ self.private_ip_address = None
+ self.public_ip_address = None
+ self.port = None
+ self.node_state = None
+ self.run_id: Optional[str] = None
+
+ @property
+ def current_job_name(self) -> Optional[str]:
+ """The run ID of the current job.
+
+ :return: The run ID of the current job.
+ :rtype: str
+ """
+ return self.run_id
+
+ @current_job_name.setter
+ def current_job_name(self, value: str) -> None:
+ """Set the current job run ID.
+
+ :param value: The job run ID.
+ :type value: str
+ """
+ self.run_id = value
+
+ @classmethod
+ def _from_rest_object(cls, rest_obj: AmlComputeNodeInformation) -> "AmlComputeNodeInfo":
+ result = cls()
+ result.__dict__.update(rest_obj.as_dict())
+ return result
+
+ def _to_dict(self) -> Dict:
+ # pylint: disable=no-member
+ res: dict = AmlComputeNodeInfoSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
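Usage sketch (illustrative, not taken from the package source): AmlComputeNodeInfo objects are normally built from REST responses via _from_rest_object, but the attributes and the current_job_name alias defined above can be exercised directly.

    info = AmlComputeNodeInfo()
    info.node_state = "running"               # illustrative value
    info.current_job_name = "example-run-id"  # property setter stores the value on run_id
    assert info.run_id == "example-run-id"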
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_custom_applications.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_custom_applications.py
new file mode 100644
index 00000000..2ee65e7f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_custom_applications.py
@@ -0,0 +1,221 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+# pylint: disable=protected-access,redefined-builtin
+
+from typing import Any, Dict, List, Optional
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import CustomService, Docker
+from azure.ai.ml._restclient.v2022_10_01_preview.models import Endpoint as RestEndpoint
+from azure.ai.ml._restclient.v2022_10_01_preview.models import EnvironmentVariable as RestEnvironmentVariable
+from azure.ai.ml._restclient.v2022_10_01_preview.models import EnvironmentVariableType as RestEnvironmentVariableType
+from azure.ai.ml._restclient.v2022_10_01_preview.models import Image as RestImage
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ImageType as RestImageType
+from azure.ai.ml._restclient.v2022_10_01_preview.models import Protocol
+from azure.ai.ml._restclient.v2022_10_01_preview.models import VolumeDefinition as RestVolumeDefinition
+from azure.ai.ml._restclient.v2022_10_01_preview.models import VolumeDefinitionType as RestVolumeDefinitionType
+from azure.ai.ml.constants._compute import DUPLICATE_APPLICATION_ERROR, INVALID_VALUE_ERROR, CustomApplicationDefaults
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationException
+
+
+class ImageSettings:
+ """Specifies an image configuration for a Custom Application.
+
+ :param reference: Image reference URL.
+ :type reference: str
+ """
+
+ def __init__(self, *, reference: str):
+ self.reference = reference
+
+ def _to_rest_object(self) -> RestImage:
+ return RestImage(type=RestImageType.DOCKER, reference=self.reference)
+
+ @classmethod
+ def _from_rest_object(cls, obj: RestImage) -> "ImageSettings":
+ return ImageSettings(reference=obj.reference)
+
+
+class EndpointsSettings:
+ """Specifies an endpoint configuration for a Custom Application.
+
+ :param target: Application port inside the container.
+ :type target: int
+ :param published: Port over which the application is exposed from the container.
+ :type published: int
+ """
+
+ def __init__(self, *, target: int, published: int):
+ EndpointsSettings._validate_endpoint_settings(target=target, published=published)
+ self.target = target
+ self.published = published
+
+ def _to_rest_object(self) -> RestEndpoint:
+ return RestEndpoint(
+ name=CustomApplicationDefaults.ENDPOINT_NAME,
+ target=self.target,
+ published=self.published,
+ protocol=Protocol.HTTP,
+ )
+
+ @classmethod
+ def _from_rest_object(cls, obj: RestEndpoint) -> "EndpointsSettings":
+ return EndpointsSettings(target=obj.target, published=obj.published)
+
+ @classmethod
+ def _validate_endpoint_settings(cls, target: int, published: int) -> None:
+ ports = {
+ CustomApplicationDefaults.TARGET_PORT: target,
+ CustomApplicationDefaults.PUBLISHED_PORT: published,
+ }
+ min_value = CustomApplicationDefaults.PORT_MIN_VALUE
+ max_value = CustomApplicationDefaults.PORT_MAX_VALUE
+
+ for port_name, port in ports.items():
+ message = INVALID_VALUE_ERROR.format(port_name, min_value, max_value)
+ if not min_value < port < max_value:
+ raise ValidationException(
+ message=message,
+ target=ErrorTarget.COMPUTE,
+ no_personal_data_message=message,
+ error_category=ErrorCategory.USER_ERROR,
+ )
+
+
+class VolumeSettings:
+ """Specifies the Bind Mount settings for a Custom Application.
+
+ :param source: The host path of the mount.
+ :type source: str
+ :param target: The path in the container for the mount.
+ :type target: str
+ """
+
+ def __init__(self, *, source: str, target: str):
+ self.source = source
+ self.target = target
+
+ def _to_rest_object(self) -> RestVolumeDefinition:
+ return RestVolumeDefinition(
+ type=RestVolumeDefinitionType.BIND,
+ read_only=False,
+ source=self.source,
+ target=self.target,
+ )
+
+ @classmethod
+ def _from_rest_object(cls, obj: RestVolumeDefinition) -> "VolumeSettings":
+ return VolumeSettings(source=obj.source, target=obj.target)
+
+
+class CustomApplications:
+ """Specifies the custom service application configuration.
+
+ :param name: Name of the Custom Application.
+ :type name: str
+ :param image: Describes the Image Specifications.
+ :type image: ImageSettings
+ :param type: Type of the Custom Application.
+ :type type: Optional[str]
+ :param endpoints: Configuring the endpoints for the container.
+ :type endpoints: List[EndpointsSettings]
+ :param environment_variables: Environment Variables for the container.
+ :type environment_variables: Optional[Dict[str, str]]
+ :param bind_mounts: Configuration of the bind mounts for the container.
+ :type bind_mounts: Optional[List[VolumeSettings]]
+ """
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ image: ImageSettings,
+ type: str = CustomApplicationDefaults.DOCKER,
+ endpoints: List[EndpointsSettings],
+ environment_variables: Optional[Dict] = None,
+ bind_mounts: Optional[List[VolumeSettings]] = None,
+ **kwargs: Any
+ ):
+ self.name = name
+ self.type = type
+ self.image = image
+ self.endpoints = endpoints
+ self.environment_variables = environment_variables
+ self.bind_mounts = bind_mounts
+ self.additional_properties = kwargs
+
+ def _to_rest_object(self) -> CustomService:
+ endpoints = None
+ if self.endpoints:
+ endpoints = [endpoint._to_rest_object() for endpoint in self.endpoints]
+
+ environment_variables = None
+ if self.environment_variables:
+ environment_variables = {
+ name: RestEnvironmentVariable(type=RestEnvironmentVariableType.LOCAL, value=value)
+ for name, value in self.environment_variables.items()
+ }
+
+ volumes = None
+ if self.bind_mounts:
+ volumes = [volume._to_rest_object() for volume in self.bind_mounts]
+
+ return CustomService(
+ name=self.name,
+ image=self.image._to_rest_object(),
+ endpoints=endpoints,
+ environment_variables=environment_variables,
+ volumes=volumes,
+ docker=Docker(privileged=True),
+ additional_properties={**{"type": self.type}, **self.additional_properties},
+ )
+
+ @classmethod
+ def _from_rest_object(cls, obj: CustomService) -> "CustomApplications":
+ endpoints = []
+ for endpoint in obj.endpoints:
+ endpoints.append(EndpointsSettings._from_rest_object(endpoint))
+
+ environment_variables = (
+ {name: value.value for name, value in obj.environment_variables.items()}
+ if obj.environment_variables
+ else None
+ )
+
+ bind_mounts = []
+ if obj.volumes:
+ for volume in obj.volumes:
+ bind_mounts.append(VolumeSettings._from_rest_object(volume))
+
+ return CustomApplications(
+ name=obj.name,
+ image=ImageSettings._from_rest_object(obj.image),
+ endpoints=endpoints,
+ environment_variables=environment_variables,
+ bind_mounts=bind_mounts,
+ type=obj.additional_properties.pop("type", CustomApplicationDefaults.DOCKER),
+ **obj.additional_properties,
+ )
+
+
+def validate_custom_applications(custom_apps: List[CustomApplications]) -> None:
+ message = DUPLICATE_APPLICATION_ERROR
+
+ names = [app.name for app in custom_apps]
+ if len(set(names)) != len(names):
+ raise ValidationException(
+ message=message.format("application_name"),
+ target=ErrorTarget.COMPUTE,
+ no_personal_data_message=message.format("application_name"),
+ error_category=ErrorCategory.USER_ERROR,
+ )
+
+ published_ports = [endpoint.published for app in custom_apps for endpoint in app.endpoints]
+
+ if len(set(published_ports)) != len(published_ports):
+ raise ValidationException(
+ message=message.format("published_port"),
+ target=ErrorTarget.COMPUTE,
+ no_personal_data_message=message.format("published_port"),
+ error_category=ErrorCategory.USER_ERROR,
+ )
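Usage sketch (illustrative, not taken from the package source), wiring the settings classes above into a single CustomApplications entry and validating a list of them. The image reference, ports, and mount paths are made-up values; port numbers must fall inside the bounds defined by CustomApplicationDefaults, which EndpointsSettings enforces.

    app = CustomApplications(
        name="jupyter-byoc",  # illustrative name
        image=ImageSettings(reference="example.azurecr.io/custom-app:latest"),
        endpoints=[EndpointsSettings(target=8888, published=8889)],
        environment_variables={"MODE": "development"},
        bind_mounts=[VolumeSettings(source="/home/azureuser/data", target="/data")],
    )
    validate_custom_applications([app])   # raises ValidationException on duplicate names or published ports
    rest_payload = app._to_rest_object()  # CustomService object sent to the service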
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_image_metadata.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_image_metadata.py
new file mode 100644
index 00000000..342e4a97
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_image_metadata.py
@@ -0,0 +1,63 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from typing import Optional
+
+
+class ImageMetadata:
+ """Metadata about the operating system image for the compute instance.
+
+ :param is_latest_os_image_version: Specifies if the compute instance is running on the latest OS image version.
+ :type is_latest_os_image_version: bool
+ :param current_image_version: Version of the current image.
+ :type current_image_version: str
+ :param latest_image_version: The latest image version.
+ :type latest_image_version: str
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START image_metadata]
+ :end-before: [END image_metadata]
+ :language: python
+ :dedent: 8
+ :caption: Creating an ImageMetadata object.
+ """
+
+ def __init__(
+ self,
+ *,
+ is_latest_os_image_version: Optional[bool],
+ current_image_version: Optional[str],
+ latest_image_version: Optional[str]
+ ) -> None:
+ self._is_latest_os_image_version = is_latest_os_image_version
+ self._current_image_version = current_image_version
+ self._latest_image_version = latest_image_version
+
+ @property
+ def is_latest_os_image_version(self) -> Optional[bool]:
+ """Whether or not a compute instance is running on the latest OS image version.
+
+ :return: Boolean indicating if the compute instance is running the latest OS image version.
+ :rtype: bool
+ """
+ return self._is_latest_os_image_version
+
+ @property
+ def current_image_version(self) -> Optional[str]:
+ """The current OS image version number.
+
+ :return: The current OS image version number.
+ :rtype: str
+ """
+ return self._current_image_version
+
+ @property
+ def latest_image_version(self) -> Optional[str]:
+ """The latest OS image version number.
+
+ :return: The latest OS image version number.
+ :rtype: str
+ """
+ return self._latest_image_version
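Usage sketch (illustrative, not taken from the package source): ImageMetadata is read-only information that the service attaches to a compute instance; the version strings below are made up.

    metadata = ImageMetadata(
        is_latest_os_image_version=False,
        current_image_version="22.08.19",  # illustrative versions
        latest_image_version="22.09.01",
    )
    if not metadata.is_latest_os_image_version:
        print(f"OS image update available: {metadata.current_image_version} -> {metadata.latest_image_version}")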
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_schedule.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_schedule.py
new file mode 100644
index 00000000..3616a5cc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_schedule.py
@@ -0,0 +1,153 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+# pylint: disable=protected-access
+from typing import Any, List, Optional, Union
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ComputePowerAction
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ComputeSchedules as RestComputeSchedules
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ComputeStartStopSchedule as RestComputeStartStopSchedule
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ScheduleStatus as ScheduleState
+from azure.ai.ml._restclient.v2022_10_01_preview.models import TriggerType
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+
+from .._schedule.trigger import CronTrigger, RecurrencePattern, RecurrenceTrigger
+
+
+class ComputeStartStopSchedule(RestTranslatableMixin):
+ """Schedules for compute start or stop scenario.
+
+ :param trigger: The trigger of the schedule.
+ :type trigger: Union[~azure.ai.ml.entities.CronTrigger, ~azure.ai.ml.entities.RecurrenceTrigger]
+ :param action: The compute power action.
+ :type action: ~azure.ai.ml.entities.ComputePowerAction
+ :param state: The state of the schedule.
+ :type state: ~azure.ai.ml.entities.ScheduleState
+ :param kwargs: A dictionary of additional configuration parameters.
+ :type kwargs: dict
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START compute_start_stop_schedule]
+ :end-before: [END compute_start_stop_schedule]
+ :language: python
+ :dedent: 8
+ :caption: Creating a ComputeStartStopSchedule object.
+ """
+
+ def __init__(
+ self,
+ *,
+ trigger: Optional[Union[CronTrigger, RecurrenceTrigger]] = None,
+ action: Optional[ComputePowerAction] = None,
+ state: ScheduleState = ScheduleState.ENABLED,
+ **kwargs: Any
+ ) -> None:
+ self.trigger = trigger
+ self.action = action
+ self.state = state
+ self._schedule_id: Optional[str] = kwargs.pop("schedule_id", None)
+ self._provisioning_state: Optional[str] = kwargs.pop("provisioning_state", None)
+
+ @property
+ def schedule_id(self) -> Optional[str]:
+ """The schedule ID.
+
+ :return: The schedule ID.
+ :rtype: Optional[str]
+ """
+ return self._schedule_id
+
+ @property
+ def provisioning_state(self) -> Optional[str]:
+ """The schedule provisioning state.
+
+ :return: The schedule provisioning state.
+ :rtype: Optional[str]
+ """
+ return self._provisioning_state
+
+ def _to_rest_object(self) -> RestComputeStartStopSchedule:
+ rest_object = RestComputeStartStopSchedule(
+ action=self.action,
+ status=self.state,
+ )
+
+ if isinstance(self.trigger, CronTrigger):
+ rest_object.trigger_type = TriggerType.CRON
+ rest_object.cron = self.trigger._to_rest_compute_cron_object()
+ elif isinstance(self.trigger, RecurrenceTrigger):
+ rest_object.trigger_type = TriggerType.RECURRENCE
+ rest_object.recurrence = self.trigger._to_rest_compute_recurrence_object()
+
+ return rest_object
+
+ @classmethod
+ def _from_rest_object(cls, obj: RestComputeStartStopSchedule) -> "ComputeStartStopSchedule":
+ schedule = ComputeStartStopSchedule(
+ action=obj.action,
+ state=obj.status,
+ schedule_id=obj.id,
+ provisioning_state=obj.provisioning_status,
+ )
+
+ if obj.trigger_type == TriggerType.CRON:
+ schedule.trigger = CronTrigger(
+ start_time=obj.cron.start_time,
+ time_zone=obj.cron.time_zone,
+ expression=obj.cron.expression,
+ )
+ elif obj.trigger_type == TriggerType.RECURRENCE:
+ schedule.trigger = RecurrenceTrigger(
+ start_time=obj.recurrence.start_time,
+ time_zone=obj.recurrence.time_zone,
+ frequency=obj.recurrence.frequency,
+ interval=obj.recurrence.interval,
+ schedule=RecurrencePattern._from_rest_object(obj.recurrence.schedule),
+ )
+
+ return schedule
+
+
+class ComputeSchedules(RestTranslatableMixin):
+ """Compute schedules.
+
+ :param compute_start_stop: Compute start or stop schedules.
+ :type compute_start_stop: List[~azure.ai.ml.entities.ComputeStartStopSchedule]
+ :param kwargs: A dictionary of additional configuration parameters.
+ :type kwargs: dict
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START compute_start_stop_schedule]
+ :end-before: [END compute_start_stop_schedule]
+ :language: python
+ :dedent: 8
+ :caption: Creating a ComputeSchedules object.
+ """
+
+ def __init__(self, *, compute_start_stop: Optional[List[ComputeStartStopSchedule]] = None) -> None:
+ self.compute_start_stop = compute_start_stop
+
+ def _to_rest_object(self) -> RestComputeSchedules:
+ rest_schedules: List[RestComputeStartStopSchedule] = []
+ if self.compute_start_stop:
+ for schedule in self.compute_start_stop:
+ rest_schedules.append(schedule._to_rest_object())
+
+ return RestComputeSchedules(
+ compute_start_stop=rest_schedules,
+ )
+
+ @classmethod
+ def _from_rest_object(cls, obj: RestComputeSchedules) -> "ComputeSchedules":
+ schedules: List[ComputeStartStopSchedule] = []
+ if obj.compute_start_stop:
+ for schedule in obj.compute_start_stop:
+ schedules.append(ComputeStartStopSchedule._from_rest_object(schedule))
+
+ return ComputeSchedules(
+ compute_start_stop=schedules,
+ )
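Usage sketch (illustrative, not taken from the package source) composing the schedule entities above. The azure.ai.ml.entities import path and the ComputePowerAction.STOP member name follow the docstring cross-references and are assumptions; the cron expression is illustrative.

    from azure.ai.ml.entities import (  # assumed public exports, per the docstring cross-references
        ComputePowerAction,
        ComputeSchedules,
        ComputeStartStopSchedule,
        CronTrigger,
    )

    stop_nightly = ComputeStartStopSchedule(
        trigger=CronTrigger(expression="0 22 * * *"),  # illustrative: every day at 22:00
        action=ComputePowerAction.STOP,                # assumed enum member name
    )
    schedules = ComputeSchedules(compute_start_stop=[stop_nightly])
    rest_schedules = schedules._to_rest_object()       # RestComputeSchedules payload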
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_setup_scripts.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_setup_scripts.py
new file mode 100644
index 00000000..d2e12fd4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_setup_scripts.py
@@ -0,0 +1,90 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+# pylint: disable=protected-access
+
+import re
+from typing import Optional, cast
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ScriptReference as RestScriptReference
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ScriptsToExecute as RestScriptsToExecute
+from azure.ai.ml._restclient.v2022_10_01_preview.models import SetupScripts as RestSetupScripts
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+
+
+class ScriptReference(RestTranslatableMixin):
+ """Script reference.
+
+ :keyword path: The location of scripts in workspace storage.
+ :paramtype path: Optional[str]
+ :keyword command: Command line arguments passed to the script to run.
+ :paramtype command: Optional[str]
+ :keyword timeout_minutes: Timeout, in minutes, for the script to run.
+ :paramtype timeout_minutes: Optional[int]
+ """
+
+ def __init__(
+ self, *, path: Optional[str] = None, command: Optional[str] = None, timeout_minutes: Optional[int] = None
+ ) -> None:
+ self.path = path
+ self.command = command
+ self.timeout_minutes = timeout_minutes
+
+ def _to_rest_object(self) -> RestScriptReference:
+ return RestScriptReference(
+ script_source="workspaceStorage",
+ script_data=self.path,
+ script_arguments=self.command,
+ timeout=f"{self.timeout_minutes}m",
+ )
+
+ @classmethod
+ def _from_rest_object(cls, obj: RestScriptReference) -> Optional["ScriptReference"]:
+ if obj is None:
+ return obj
+ timeout_match = re.match(r"(\d+)m", obj.timeout) if obj.timeout else None
+ timeout_minutes = timeout_match.group(1) if timeout_match else None
+ script_reference = ScriptReference(
+ path=obj.script_data if obj.script_data else None,
+ command=obj.script_arguments if obj.script_arguments else None,
+ timeout_minutes=cast(Optional[int], timeout_minutes),
+ )
+ return script_reference
+
+
+class SetupScripts(RestTranslatableMixin):
+ """Customized setup scripts.
+
+ :keyword startup_script: The script to be run every time the compute is started.
+ :paramtype startup_script: Optional[~azure.ai.ml.entities.ScriptReference]
+ :keyword creation_script: The script to be run only when the compute is created.
+ :paramtype creation_script: Optional[~azure.ai.ml.entities.ScriptReference]
+ """
+
+ def __init__(
+ self, *, startup_script: Optional[ScriptReference] = None, creation_script: Optional[ScriptReference] = None
+ ) -> None:
+ self.startup_script = startup_script
+ self.creation_script = creation_script
+
+ def _to_rest_object(self) -> RestScriptsToExecute:
+ scripts_to_execute = RestScriptsToExecute(
+ startup_script=self.startup_script._to_rest_object() if self.startup_script else None,
+ creation_script=self.creation_script._to_rest_object() if self.creation_script else None,
+ )
+ return RestSetupScripts(scripts=scripts_to_execute)
+
+ @classmethod
+ def _from_rest_object(cls, obj: RestSetupScripts) -> Optional["SetupScripts"]:
+ if obj is None or obj.scripts is None:
+ return None
+ scripts = obj.scripts
+ setup_scripts = SetupScripts(
+ startup_script=ScriptReference._from_rest_object(
+ scripts.startup_script if scripts.startup_script else None
+ ),
+ creation_script=ScriptReference._from_rest_object(
+ scripts.creation_script if scripts.creation_script else None
+ ),
+ )
+ return setup_scripts
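Usage sketch (illustrative, not taken from the package source): the script paths, arguments, and timeouts below are made up; _to_rest_object marks both scripts as sourced from workspace storage and renders the timeout as "<n>m".

    scripts = SetupScripts(
        creation_script=ScriptReference(path="setup/create.sh", timeout_minutes=20),  # runs once at creation
        startup_script=ScriptReference(path="setup/start.sh", command="--quiet", timeout_minutes=10),
    )
    rest_scripts = scripts._to_rest_object()  # RestSetupScripts wrapping a ScriptsToExecute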
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_usage.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_usage.py
new file mode 100644
index 00000000..6702382e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_usage.py
@@ -0,0 +1,100 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from abc import abstractmethod
+from os import PathLike
+from typing import IO, Any, AnyStr, Dict, Optional, Union
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import Usage as RestUsage
+from azure.ai.ml._restclient.v2022_10_01_preview.models import UsageUnit
+from azure.ai.ml._schema.compute.usage import UsageSchema
+from azure.ai.ml._utils.utils import dump_yaml_to_file
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+
+
+class UsageName:
+ def __init__(self, *, value: Optional[str] = None, localized_value: Optional[str] = None) -> None:
+ """The usage name.
+
+ :param value: The name of the resource.
+ :type value: Optional[str]
+ :param localized_value: The localized name of the resource.
+ :type localized_value: Optional[str]
+ """
+ self.value = value
+ self.localized_value = localized_value
+
+
+class Usage(RestTranslatableMixin):
+ """AzureML resource usage.
+
+ :param id: The resource ID.
+ :type id: Optional[str]
+ :param aml_workspace_location: The region of the AzureML workspace specified by the ID.
+ :type aml_workspace_location: Optional[str]
+ :param type: The resource type.
+ :type type: Optional[str]
+ :param unit: The unit of measurement for usage. Accepted value is "Count".
+ :type unit: Optional[Union[str, ~azure.ai.ml.entities.UsageUnit]]
+ :param current_value: The current usage of the resource.
+ :type current_value: Optional[int]
+ :param limit: The maximum permitted usage for the resource.
+ :type limit: Optional[int]
+ :param name: The name of the usage type.
+ :type name: Optional[~azure.ai.ml.entities.UsageName]
+ """
+
+ def __init__(
+ self,
+ id: Optional[str] = None, # pylint: disable=redefined-builtin
+ aml_workspace_location: Optional[str] = None,
+ type: Optional[str] = None, # pylint: disable=redefined-builtin
+ unit: Optional[Union[str, UsageUnit]] = None, # enum
+ current_value: Optional[int] = None,
+ limit: Optional[int] = None,
+ name: Optional[UsageName] = None,
+ ) -> None:
+ self.id = id
+ self.aml_workspace_location = aml_workspace_location
+ self.type = type
+ self.unit = unit
+ self.current_value = current_value
+ self.limit = limit
+ self.name = name
+
+ @classmethod
+ def _from_rest_object(cls, obj: RestUsage) -> "Usage":
+ result = cls()
+ result.__dict__.update(obj.as_dict())
+ return result
+
+ def dump(self, dest: Union[str, PathLike, IO[AnyStr]], **kwargs: Any) -> None:
+ """Dumps the job content into a file in YAML format.
+
+ :param dest: The local path or file stream to write the YAML content to.
+ If dest is a file path, a new file will be created.
+ If dest is an open file, the file will be written to directly.
+ :type dest: Union[PathLike, str, IO[AnyStr]]
+ :raises: FileExistsError if dest is a file path and the file already exists.
+ :raises: IOError if dest is an open file and the file is not writable.
+ """
+ path = kwargs.pop("path", None)
+ yaml_serialized = self._to_dict()
+ dump_yaml_to_file(dest, yaml_serialized, default_flow_style=False, path=path, **kwargs)
+
+ def _to_dict(self) -> Dict:
+ # pylint: disable=no-member
+ res: dict = UsageSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
+
+ @classmethod
+ @abstractmethod
+ def _load(
+ cls,
+ path: Union[PathLike, str],
+ params_override: Optional[list] = None,
+ **kwargs: Any,
+ ) -> "Usage":
+ pass
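Usage sketch (illustrative, not taken from the package source): Usage records are normally returned by quota/usage listing operations, but the constructor above can be exercised directly; all values below are made up.

    usage = Usage(
        aml_workspace_location="eastus",
        type="Microsoft.MachineLearningServices/workspaces/usages",
        unit="Count",
        current_value=12,
        limit=100,
        name=UsageName(value="Total Cluster Dedicated Regional vCPUs"),
    )
    usage.dump("usage.yml")  # serialized through UsageSchema; raises FileExistsError if the file exists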
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_vm_size.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_vm_size.py
new file mode 100644
index 00000000..2f0049f0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/_vm_size.py
@@ -0,0 +1,104 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from abc import abstractmethod
+from os import PathLike
+from typing import IO, Any, AnyStr, Dict, List, Optional, Union
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import VirtualMachineSize
+from azure.ai.ml._schema.compute.vm_size import VmSizeSchema
+from azure.ai.ml._utils.utils import dump_yaml_to_file
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+
+
+class VmSize(RestTranslatableMixin):
+ """Virtual Machine Size.
+
+ :param name: The virtual machine size name.
+ :type name: Optional[str]
+ :param family: The virtual machine size family name.
+ :type family: Optional[str]
+ :param v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :type v_cp_us: Optional[int]
+ :param gpus: The number of GPUs supported by the virtual machine size.
+ :type gpus: Optional[int]
+ :param os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :type os_vhd_size_mb: Optional[int]
+ :param max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :type max_resource_volume_mb: Optional[int]
+ :param memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :type memory_gb: Optional[float]
+ :param low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :type low_priority_capable: Optional[bool]
+ :param premium_io: Specifies if the virtual machine size supports premium IO.
+ :type premium_io: Optional[bool]
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices
+ :param supported_compute_types: Specifies the compute types supported by the virtual machine
+ size.
+ :type supported_compute_types: Optional[list[str]]
+ """
+
+ def __init__(
+ self,
+ name: Optional[str] = None,
+ family: Optional[str] = None,
+ v_cp_us: Optional[int] = None,
+ gpus: Optional[int] = None,
+ os_vhd_size_mb: Optional[int] = None,
+ max_resource_volume_mb: Optional[int] = None,
+ memory_gb: Optional[float] = None,
+ low_priority_capable: Optional[bool] = None,
+ premium_io: Optional[bool] = None,
+ supported_compute_types: Optional[List[str]] = None,
+ ) -> None:
+ self.name = name
+ self.family = family
+ self.v_cp_us = v_cp_us
+ self.gpus = gpus
+ self.os_vhd_size_mb = os_vhd_size_mb
+ self.max_resource_volume_mb = max_resource_volume_mb
+ self.memory_gb = memory_gb
+ self.low_priority_capable = low_priority_capable
+ self.premium_io = premium_io
+ self.supported_compute_types = ",".join(map(str, supported_compute_types)) if supported_compute_types else None
+
+ @classmethod
+ def _from_rest_object(cls, obj: VirtualMachineSize) -> "VmSize":
+ result = cls()
+ result.__dict__.update(obj.as_dict())
+ return result
+
+ def dump(self, dest: Union[str, PathLike, IO[AnyStr]], **kwargs: Any) -> None:
+ """Dump the virtual machine size content into a file in yaml format.
+
+ :param dest: The destination to receive this virtual machine size's content.
+ Must be either a path to a local file, or an already-open file stream.
+ If dest is a file path, a new file will be created,
+ and an exception is raised if the file exists.
+ If dest is an open file, the file will be written to directly,
+ and an exception will be raised if the file is not writable.
+ :type dest: Union[PathLike, str, IO[AnyStr]]
+ """
+
+ path = kwargs.pop("path", None)
+ yaml_serialized = self._to_dict()
+ dump_yaml_to_file(dest, yaml_serialized, default_flow_style=False, path=path, **kwargs)
+
+ def _to_dict(self) -> Dict:
+ # pylint: disable=no-member
+ res: dict = VmSizeSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
+
+ @classmethod
+ @abstractmethod
+ def _load(
+ cls,
+ path: Union[PathLike, str],
+ params_override: Optional[list] = None,
+ **kwargs: Any,
+ ) -> "VmSize":
+ pass
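Usage sketch (illustrative, not taken from the package source): VmSize instances are usually produced from service responses via _from_rest_object, but the constructor above accepts the same fields directly; the size details below are made up.

    vm = VmSize(
        name="Standard_DS3_v2",  # illustrative size
        family="standardDSv2Family",
        v_cp_us=4,
        gpus=0,
        memory_gb=14.0,
        low_priority_capable=True,
        premium_io=True,
        supported_compute_types=["AmlCompute", "ComputeInstance"],  # stored as a comma-joined string
    )
    vm.dump("vm_size.yml")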
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/aml_compute.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/aml_compute.py
new file mode 100644
index 00000000..3ec7c10f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/aml_compute.py
@@ -0,0 +1,281 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access,too-many-instance-attributes
+
+from typing import Any, Dict, Optional
+
+from azure.ai.ml._restclient.v2022_12_01_preview.models import (
+ AmlCompute as AmlComputeRest,
+)
+from azure.ai.ml._restclient.v2022_12_01_preview.models import (
+ AmlComputeProperties,
+ ComputeResource,
+ ResourceId,
+ ScaleSettings,
+ UserAccountCredentials,
+)
+from azure.ai.ml._schema._utils.utils import get_subnet_str
+from azure.ai.ml._schema.compute.aml_compute import AmlComputeSchema
+from azure.ai.ml._utils.utils import (
+ camel_to_snake,
+ snake_to_pascal,
+ to_iso_duration_format,
+)
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, TYPE
+from azure.ai.ml.constants._compute import ComputeDefaults, ComputeType
+from azure.ai.ml.entities._credentials import IdentityConfiguration
+from azure.ai.ml.entities._util import load_from_dict
+
+from .compute import Compute, NetworkSettings
+
+
+class AmlComputeSshSettings:
+ """SSH settings to access a AML compute target.
+
+ :param admin_username: SSH user name.
+ :type admin_username: str
+ :param admin_password: SSH user password. Defaults to None.
+ :type admin_password: str
+ :param ssh_key_value: The SSH RSA private key. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs. Defaults to None.
+ :type ssh_key_value: Optional[str]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START aml_compute_ssh_settings]
+ :end-before: [END aml_compute_ssh_settings]
+ :language: python
+ :dedent: 8
+ :caption: Configuring an AmlComputeSshSettings object.
+ """
+
+ def __init__(
+ self,
+ *,
+ admin_username: str,
+ admin_password: Optional[str] = None,
+ ssh_key_value: Optional[str] = None,
+ ) -> None:
+ self.admin_username = admin_username
+ self.admin_password = admin_password
+ self.ssh_key_value = ssh_key_value
+
+ def _to_user_account_credentials(self) -> UserAccountCredentials:
+ return UserAccountCredentials(
+ admin_user_name=self.admin_username,
+ admin_user_password=self.admin_password,
+ admin_user_ssh_public_key=self.ssh_key_value,
+ )
+
+ @classmethod
+ def _from_user_account_credentials(cls, credentials: UserAccountCredentials) -> "AmlComputeSshSettings":
+ return cls(
+ admin_username=credentials.admin_user_name,
+ admin_password=credentials.admin_user_password,
+ ssh_key_value=credentials.admin_user_ssh_public_key,
+ )
+
+
+class AmlCompute(Compute):
+ """AzureML Compute resource.
+
+ :param name: Name of the compute resource.
+ :type name: str
+ :param description: Description of the compute resource.
+ :type description: Optional[str]
+ :param size: Size of the compute. Defaults to None.
+ :type size: Optional[str]
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: Optional[dict[str, str]]
+ :param ssh_settings: SSH settings to access the AzureML compute cluster.
+ :type ssh_settings: Optional[~azure.ai.ml.entities.AmlComputeSshSettings]
+ :param network_settings: Virtual network settings for the AzureML compute cluster.
+ :type network_settings: Optional[~azure.ai.ml.entities.NetworkSettings]
+ :param idle_time_before_scale_down: Node idle time before scaling down. Defaults to None.
+ :type idle_time_before_scale_down: Optional[int]
+ :param identity: The identities that are associated with the compute cluster.
+ :type identity: Optional[~azure.ai.ml.entities.IdentityConfiguration]
+ :param tier: Virtual Machine tier. Accepted values include: "Dedicated", "LowPriority". Defaults to None.
+ :type tier: Optional[str]
+ :param min_instances: Minimum number of instances. Defaults to None.
+ :type min_instances: Optional[int]
+ :param max_instances: Maximum number of instances. Defaults to None.
+ :type max_instances: Optional[int]
+ :param ssh_public_access_enabled: State of the public SSH port. Accepted values are:
+ * False - Indicates that the public SSH port is closed on all nodes of the cluster.
+ * True - Indicates that the public SSH port is open on all nodes of the cluster.
+ * None - Indicates that the public SSH port is closed on all nodes of the cluster if VNet is defined,
+ else it is open on all public nodes.
+ It can be None only during cluster creation time. After creation it will be either True or False.
+ Defaults to None.
+ :type ssh_public_access_enabled: Optional[bool]
+ :param enable_node_public_ip: Enable or disable node public IP address provisioning. Accepted values are:
+ * True - Indicates that the compute nodes will have public IPs provisioned.
+ * False - Indicates that the compute nodes will have a private endpoint and no public IPs.
+ Defaults to True.
+ :type enable_node_public_ip: bool
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START amlcompute]
+ :end-before: [END amlcompute]
+ :language: python
+ :dedent: 8
+ :caption: Creating an AmlCompute object.
+ """
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ description: Optional[str] = None,
+ size: Optional[str] = None,
+ tags: Optional[dict] = None,
+ ssh_public_access_enabled: Optional[bool] = None,
+ ssh_settings: Optional[AmlComputeSshSettings] = None,
+ min_instances: Optional[int] = None,
+ max_instances: Optional[int] = None,
+ network_settings: Optional[NetworkSettings] = None,
+ idle_time_before_scale_down: Optional[int] = None,
+ identity: Optional[IdentityConfiguration] = None,
+ tier: Optional[str] = None,
+ enable_node_public_ip: bool = True,
+ **kwargs: Any,
+ ) -> None:
+ kwargs[TYPE] = ComputeType.AMLCOMPUTE
+ super().__init__(
+ name=name,
+ description=description,
+ location=kwargs.pop("location", None),
+ tags=tags,
+ **kwargs,
+ )
+ self.size = size
+ self.min_instances = min_instances or 0
+ self.max_instances = max_instances or 1
+ self.idle_time_before_scale_down = idle_time_before_scale_down
+ self.identity = identity
+ self.ssh_public_access_enabled = ssh_public_access_enabled
+ self.ssh_settings = ssh_settings
+ self.network_settings = network_settings
+ self.tier = tier
+ self.enable_node_public_ip = enable_node_public_ip
+ self.subnet = None
+
+ @classmethod
+ def _load_from_rest(cls, rest_obj: ComputeResource) -> "AmlCompute":
+ prop = rest_obj.properties
+
+ network_settings = None
+ if prop.properties.subnet or (prop.properties.enable_node_public_ip is not None):
+ network_settings = NetworkSettings(
+ subnet=prop.properties.subnet.id if prop.properties.subnet else None,
+ )
+
+ ssh_settings = (
+ AmlComputeSshSettings._from_user_account_credentials(prop.properties.user_account_credentials)
+ if prop.properties.user_account_credentials
+ else None
+ )
+
+ response = AmlCompute(
+ name=rest_obj.name,
+ id=rest_obj.id,
+ description=prop.description,
+ location=(prop.compute_location if prop.compute_location else rest_obj.location),
+ tags=rest_obj.tags if rest_obj.tags else None,
+ provisioning_state=prop.provisioning_state,
+ provisioning_errors=(
+ prop.provisioning_errors[0].error.code
+ if (prop.provisioning_errors and len(prop.provisioning_errors) > 0)
+ else None
+ ),
+ size=prop.properties.vm_size,
+ tier=camel_to_snake(prop.properties.vm_priority),
+ min_instances=(prop.properties.scale_settings.min_node_count if prop.properties.scale_settings else None),
+ max_instances=(prop.properties.scale_settings.max_node_count if prop.properties.scale_settings else None),
+ network_settings=network_settings or None,
+ ssh_settings=ssh_settings,
+ ssh_public_access_enabled=(prop.properties.remote_login_port_public_access == "Enabled"),
+ idle_time_before_scale_down=(
+ prop.properties.scale_settings.node_idle_time_before_scale_down.total_seconds()
+ if prop.properties.scale_settings and prop.properties.scale_settings.node_idle_time_before_scale_down
+ else None
+ ),
+ identity=(
+ IdentityConfiguration._from_compute_rest_object(rest_obj.identity) if rest_obj.identity else None
+ ),
+ created_on=prop.additional_properties.get("createdOn", None),
+ enable_node_public_ip=(
+ prop.properties.enable_node_public_ip if prop.properties.enable_node_public_ip is not None else True
+ ),
+ )
+ return response
+
+ def _set_full_subnet_name(self, subscription_id: str, rg: str) -> None:
+ if self.network_settings:
+ self.subnet = get_subnet_str(
+ self.network_settings.vnet_name,
+ self.network_settings.subnet,
+ subscription_id,
+ rg,
+ )
+
+ def _to_dict(self) -> Dict:
+ res: dict = AmlComputeSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
+
+ @classmethod
+ def _load_from_dict(cls, data: Dict, context: Dict, **kwargs: Any) -> "AmlCompute":
+ loaded_data = load_from_dict(AmlComputeSchema, data, context, **kwargs)
+ return AmlCompute(**loaded_data)
+
+ def _to_rest_object(self) -> ComputeResource:
+ if self.network_settings and self.network_settings.subnet:
+ subnet_resource = ResourceId(id=self.subnet)
+ else:
+ subnet_resource = None
+
+ # Scale settings is required when creating an AzureML compute cluster.
+ scale_settings = ScaleSettings(
+ max_node_count=self.max_instances,
+ min_node_count=self.min_instances,
+ node_idle_time_before_scale_down=(
+ to_iso_duration_format(int(self.idle_time_before_scale_down))
+ if self.idle_time_before_scale_down
+ else None
+ ),
+ )
+ remote_login_public_access = "Enabled"
+ disableLocalAuth = not (self.ssh_public_access_enabled and self.ssh_settings is not None)
+ if self.ssh_public_access_enabled is not None:
+ remote_login_public_access = "Enabled" if self.ssh_public_access_enabled else "Disabled"
+
+ else:
+ remote_login_public_access = "NotSpecified"
+ aml_prop = AmlComputeProperties(
+ vm_size=self.size if self.size else ComputeDefaults.VMSIZE,
+ vm_priority=snake_to_pascal(self.tier),
+ user_account_credentials=(self.ssh_settings._to_user_account_credentials() if self.ssh_settings else None),
+ scale_settings=scale_settings,
+ subnet=subnet_resource,
+ remote_login_port_public_access=remote_login_public_access,
+ enable_node_public_ip=self.enable_node_public_ip,
+ )
+
+ aml_comp = AmlComputeRest(
+ description=self.description,
+ compute_type=self.type,
+ properties=aml_prop,
+ disable_local_auth=disableLocalAuth,
+ )
+ return ComputeResource(
+ location=self.location,
+ properties=aml_comp,
+ identity=(self.identity._to_compute_rest_object() if self.identity else None),
+ tags=self.tags,
+ )
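Usage sketch (illustrative, not taken from the package source) declaring an AmlCompute cluster with the SSH and network settings defined above. Names, sizes, and key material are made up; submitting the object requires an authenticated MLClient, which is assumed and not shown.

    from azure.ai.ml.entities import AmlCompute, AmlComputeSshSettings, NetworkSettings  # assumed public exports

    cluster = AmlCompute(
        name="cpu-cluster",
        size="Standard_DS3_v2",
        min_instances=0,
        max_instances=4,
        idle_time_before_scale_down=120,  # seconds; round-tripped via total_seconds() in _load_from_rest above
        ssh_public_access_enabled=True,
        ssh_settings=AmlComputeSshSettings(
            admin_username="azureuser",
            ssh_key_value="ssh-rsa AAAA...",  # placeholder public key
        ),
        network_settings=NetworkSettings(vnet_name="my-vnet", subnet="default"),
    )
    # ml_client.begin_create_or_update(cluster)  # submit with an authenticated MLClient (assumed)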
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/compute.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/compute.py
new file mode 100644
index 00000000..de18da5a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/compute.py
@@ -0,0 +1,261 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+from abc import abstractmethod
+from os import PathLike
+from pathlib import Path
+from typing import IO, Any, AnyStr, Dict, Optional, Union, cast
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ComputeResource
+from azure.ai.ml._schema.compute.compute import ComputeSchema
+from azure.ai.ml._utils.utils import dump_yaml_to_file
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY, CommonYamlFields
+from azure.ai.ml.constants._compute import ComputeType
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+from azure.ai.ml.entities._resource import Resource
+from azure.ai.ml.entities._util import find_type_in_override
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationException
+
+
+class Compute(Resource, RestTranslatableMixin):
+ """Base class for compute resources.
+
+ This class should not be instantiated directly. Instead, use one of its subclasses.
+
+ :param type: The compute type. Accepted values are "amlcompute", "computeinstance",
+ "virtualmachine", "kubernetes", and "synapsespark".
+ :type type: str
+ :param name: Name of the compute resource.
+ :type name: str
+ :param location: The resource location. Defaults to workspace location.
+ :type location: Optional[str]
+ :param description: Description of the resource. Defaults to None.
+ :type description: Optional[str]
+ :param resource_id: ARM resource id of the underlying compute. Defaults to None.
+ :type resource_id: Optional[str]
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: Optional[dict[str, str]]
+ """
+
+ def __init__(
+ self,
+ name: str,
+ location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ tags: Optional[Dict] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._type: Optional[str] = kwargs.pop("type", None)
+ if self._type:
+ self._type = self._type.lower()
+
+ self._created_on: Optional[str] = kwargs.pop("created_on", None)
+ self._provisioning_state: Optional[str] = kwargs.pop("provisioning_state", None)
+ self._provisioning_errors: Optional[str] = kwargs.pop("provisioning_errors", None)
+
+ super().__init__(name=name, description=description, **kwargs)
+ self.resource_id = resource_id
+ self.location = location
+ self.tags = tags
+
+ @property
+ def type(self) -> Optional[str]:
+ """The compute type.
+
+ :return: The compute type.
+ :rtype: Optional[str]
+ """
+ return self._type
+
+ @property
+ def created_on(self) -> Optional[str]:
+ """The compute resource creation timestamp.
+
+ :return: The compute resource creation timestamp.
+ :rtype: Optional[str]
+ """
+ return self._created_on
+
+ @property
+ def provisioning_state(self) -> Optional[str]:
+ """The compute resource's provisioning state.
+
+ :return: The compute resource's provisioning state.
+ :rtype: Optional[str]
+ """
+ return self._provisioning_state
+
+ @property
+ def provisioning_errors(self) -> Optional[str]:
+ """The compute resource provisioning errors.
+
+ :return: The compute resource provisioning errors.
+ :rtype: Optional[str]
+ """
+ return self._provisioning_errors
+
+ def _to_rest_object(self) -> ComputeResource:
+ pass
+
+ @classmethod
+ def _from_rest_object(cls, obj: ComputeResource) -> "Compute":
+ from azure.ai.ml.entities import (
+ AmlCompute,
+ ComputeInstance,
+ KubernetesCompute,
+ SynapseSparkCompute,
+ UnsupportedCompute,
+ VirtualMachineCompute,
+ )
+
+ mapping = {
+ ComputeType.AMLCOMPUTE.lower(): AmlCompute,
+ ComputeType.COMPUTEINSTANCE.lower(): ComputeInstance,
+ ComputeType.VIRTUALMACHINE.lower(): VirtualMachineCompute,
+ ComputeType.KUBERNETES.lower(): KubernetesCompute,
+ ComputeType.SYNAPSESPARK.lower(): SynapseSparkCompute,
+ }
+ compute_type = obj.properties.compute_type.lower() if obj.properties.compute_type else None
+
+ class_type = cast(
+ Optional[Union[AmlCompute, ComputeInstance, VirtualMachineCompute, KubernetesCompute, SynapseSparkCompute]],
+ mapping.get(compute_type, None), # type: ignore
+ )
+ if class_type:
+ return class_type._load_from_rest(obj)
+ _unsupported_from_rest: Compute = UnsupportedCompute._load_from_rest(obj)
+ return _unsupported_from_rest
+
+ @classmethod
+ @abstractmethod
+ def _load_from_rest(cls, rest_obj: ComputeResource) -> "Compute":
+ pass
+
+ def _set_full_subnet_name(self, subscription_id: str, rg: str) -> None:
+ pass
+
+ def dump(self, dest: Union[str, PathLike, IO[AnyStr]], **kwargs: Any) -> None:
+ """Dump the compute content into a file in yaml format.
+
+ :param dest: The destination to receive this compute's content.
+ Must be either a path to a local file, or an already-open file stream.
+ If dest is a file path, a new file will be created,
+ and an exception is raised if the file exists.
+ If dest is an open file, the file will be written to directly,
+ and an exception will be raised if the file is not writable.
+ :type dest: Union[PathLike, str, IO[AnyStr]]
+ """
+ path = kwargs.pop("path", None)
+ yaml_serialized = self._to_dict()
+ dump_yaml_to_file(dest, yaml_serialized, default_flow_style=False, path=path, **kwargs)
+
+ def _to_dict(self) -> Dict:
+ res: dict = ComputeSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
+
+ @classmethod
+ def _load(
+ cls,
+ data: Optional[Dict] = None,
+ yaml_path: Optional[Union[PathLike, str]] = None,
+ params_override: Optional[list] = None,
+ **kwargs: Any,
+ ) -> "Compute":
+ data = data or {}
+ params_override = params_override or []
+ context = {
+ BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+ PARAMS_OVERRIDE_KEY: params_override,
+ }
+ from azure.ai.ml.entities import (
+ AmlCompute,
+ ComputeInstance,
+ KubernetesCompute,
+ SynapseSparkCompute,
+ VirtualMachineCompute,
+ )
+
+ type_in_override = find_type_in_override(params_override) if params_override else None
+ compute_type = type_in_override or data.get(CommonYamlFields.TYPE, None) # override takes the priority
+ if compute_type:
+ if compute_type.lower() == ComputeType.VIRTUALMACHINE:
+ _vm_load_from_dict: Compute = VirtualMachineCompute._load_from_dict(data, context, **kwargs)
+ return _vm_load_from_dict
+ if compute_type.lower() == ComputeType.AMLCOMPUTE:
+ _aml_load_from_dict: Compute = AmlCompute._load_from_dict(data, context, **kwargs)
+ return _aml_load_from_dict
+ if compute_type.lower() == ComputeType.COMPUTEINSTANCE:
+ _compute_instance_load_from_dict: Compute = ComputeInstance._load_from_dict(data, context, **kwargs)
+ return _compute_instance_load_from_dict
+ if compute_type.lower() == ComputeType.KUBERNETES:
+ _kub_load_from_dict: Compute = KubernetesCompute._load_from_dict(data, context, **kwargs)
+ return _kub_load_from_dict
+ if compute_type.lower() == ComputeType.SYNAPSESPARK:
+ _synapse_spark_load_from_dict: Compute = SynapseSparkCompute._load_from_dict(data, context, **kwargs)
+ return _synapse_spark_load_from_dict
+ msg = f"Unknown compute type: {compute_type}"
+ raise ValidationException(
+ message=msg,
+ target=ErrorTarget.COMPUTE,
+ no_personal_data_message=msg,
+ error_category=ErrorCategory.USER_ERROR,
+ )
+
+ @classmethod
+ @abstractmethod
+ def _load_from_dict(cls, data: Dict, context: Dict, **kwargs: Any) -> "Compute":
+ pass
+
+
+class NetworkSettings:
+ """Network settings for a compute resource. If the workspace and VNet are in different resource groups,
+ please provide the full URI for subnet and leave vnet_name as None.
+
+ :param vnet_name: The virtual network name.
+ :type vnet_name: Optional[str]
+ :param subnet: The subnet name.
+ :type subnet: Optional[str]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START network_settings]
+ :end-before: [END network_settings]
+ :language: python
+ :dedent: 8
+ :caption: Configuring NetworkSettings for an AmlCompute object.
+ """
+
+ def __init__(
+ self,
+ *,
+ vnet_name: Optional[str] = None,
+ subnet: Optional[str] = None,
+ **kwargs: Any,
+ ) -> None:
+ self.vnet_name = vnet_name
+ self.subnet = subnet
+ self._public_ip_address: str = kwargs.pop("public_ip_address", None)
+ self._private_ip_address: str = kwargs.pop("private_ip_address", None)
+
+ @property
+ def public_ip_address(self) -> str:
+ """Public IP address of the compute instance.
+
+ :return: Public IP address.
+ :rtype: str
+ """
+ return self._public_ip_address
+
+ @property
+ def private_ip_address(self) -> str:
+ """Private IP address of the compute instance.
+
+ :return: Private IP address.
+ :rtype: str
+ """
+ return self._private_ip_address
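Usage sketch (illustrative, not taken from the package source): Compute._load dispatches on the YAML type field to the matching subclass. The public entry point used here, load_compute from the azure.ai.ml namespace, and the file contents are assumptions.

    from azure.ai.ml import load_compute  # assumed public loader that delegates to Compute._load

    # compute.yml (illustrative):
    #   type: amlcompute
    #   name: cpu-cluster
    #   size: Standard_DS3_v2
    #   min_instances: 0
    #   max_instances: 2
    compute = load_compute(source="compute.yml")
    print(type(compute).__name__)  # -> AmlCompute, chosen from the type field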
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/compute_instance.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/compute_instance.py
new file mode 100644
index 00000000..9cbb2528
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/compute_instance.py
@@ -0,0 +1,511 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access,too-many-instance-attributes
+
+import logging
+import re
+import warnings
+from typing import Any, Dict, List, Optional
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import AssignedUser
+from azure.ai.ml._restclient.v2023_08_01_preview.models import ComputeInstance as CIRest
+from azure.ai.ml._restclient.v2023_08_01_preview.models import ComputeInstanceProperties
+from azure.ai.ml._restclient.v2023_08_01_preview.models import ComputeInstanceSshSettings as CiSShSettings
+from azure.ai.ml._restclient.v2023_08_01_preview.models import (
+ ComputeResource,
+ PersonalComputeInstanceSettings,
+ ResourceId,
+)
+from azure.ai.ml._schema._utils.utils import get_subnet_str
+from azure.ai.ml._schema.compute.compute_instance import ComputeInstanceSchema
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, TYPE
+from azure.ai.ml.constants._compute import ComputeDefaults, ComputeType
+from azure.ai.ml.entities._compute.compute import Compute, NetworkSettings
+from azure.ai.ml.entities._credentials import IdentityConfiguration
+from azure.ai.ml.entities._mixins import DictMixin
+from azure.ai.ml.entities._util import load_from_dict
+
+from ._custom_applications import CustomApplications, validate_custom_applications
+from ._image_metadata import ImageMetadata
+from ._schedule import ComputeSchedules
+from ._setup_scripts import SetupScripts
+
+module_logger = logging.getLogger(__name__)
+
+
+class ComputeInstanceSshSettings:
+ """Credentials for an administrator user account to SSH into the compute node.
+
+ Can only be configured if `ssh_public_access_enabled` is set to true on compute
+ resource.
+
+ :param ssh_key_value: The SSH public key of the administrator user account.
+ :type ssh_key_value: Optional[str]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START compute_instance_ssh_settings]
+ :end-before: [END compute_instance_ssh_settings]
+ :language: python
+ :dedent: 8
+ :caption: Configuring ComputeInstanceSshSettings object.
+ """
+
+ def __init__(
+ self,
+ *,
+ ssh_key_value: Optional[str] = None,
+ **kwargs: Any,
+ ) -> None:
+ self.ssh_key_value = ssh_key_value
+ self._ssh_port: str = kwargs.pop("ssh_port", None)
+ self._admin_username: str = kwargs.pop("admin_username", None)
+
+ @property
+ def admin_username(self) -> str:
+ """The name of the administrator user account which can be used to SSH into nodes.
+
+ :return: The name of the administrator user account.
+ :rtype: str
+ """
+ return self._admin_username
+
+ @property
+ def ssh_port(self) -> str:
+ """SSH port.
+
+ :return: SSH port.
+ :rtype: str
+ """
+ return self._ssh_port
+
+
+class AssignedUserConfiguration(DictMixin):
+ """Settings to create a compute resource on behalf of another user.
+
+ :param user_tenant_id: Tenant ID of the user to assign the compute target to.
+ :type user_tenant_id: str
+ :param user_object_id: Object ID of the user to assign the compute target to.
+ :type user_object_id: str
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START assigned_user_configuration]
+ :end-before: [END assigned_user_configuration]
+ :language: python
+ :dedent: 8
+ :caption: Creating an AssignedUserConfiguration.
+ """
+
+ def __init__(self, *, user_tenant_id: str, user_object_id: str) -> None:
+ self.user_tenant_id = user_tenant_id
+ self.user_object_id = user_object_id
+
+
+class ComputeInstance(Compute):
+ """Compute Instance resource.
+
+ :param name: Name of the compute.
+ :type name: str
+ :param location: The resource location.
+ :type location: Optional[str]
+ :param description: Description of the resource.
+ :type description: Optional[str]
+ :param size: Compute size.
+ :type size: Optional[str]
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: Optional[dict[str, str]]
+ :param create_on_behalf_of: Configuration to create resource on behalf of another user. Defaults to None.
+ :type create_on_behalf_of: Optional[~azure.ai.ml.entities.AssignedUserConfiguration]
+ :ivar state: State of the resource.
+ :type state: Optional[str]
+ :ivar last_operation: The last operation.
+ :type last_operation: Optional[Dict[str, str]]
+ :ivar applications: Applications associated with the compute instance.
+ :type applications: Optional[List[Dict[str, str]]]
+ :param network_settings: Network settings for the compute instance.
+ :type network_settings: Optional[~azure.ai.ml.entities.NetworkSettings]
+ :param ssh_settings: SSH settings for the compute instance.
+ :type ssh_settings: Optional[~azure.ai.ml.entities.ComputeInstanceSshSettings]
+ :param ssh_public_access_enabled: State of the public SSH port. Defaults to None.
+ Possible values are:
+
+ * False - Indicates that the public ssh port is closed on all nodes of the cluster.
+ * True - Indicates that the public ssh port is open on all nodes of the cluster.
+        * None - Indicates that the public ssh port is closed on all nodes of the cluster if a VNet is
+            defined, else it is open on all public nodes. It can only be the default during cluster
+            creation time; after creation it will be either True or False.
+
+ :type ssh_public_access_enabled: Optional[bool]
+ :param schedules: Compute instance schedules. Defaults to None.
+ :type schedules: Optional[~azure.ai.ml.entities.ComputeSchedules]
+ :param identity: The identities that are associated with the compute cluster.
+ :type identity: ~azure.ai.ml.entities.IdentityConfiguration
+    :param idle_time_before_shutdown: Deprecated. Use the `idle_time_before_shutdown_minutes` parameter instead.
+        Stops the compute instance after a user-defined period of inactivity.
+        Time is defined in ISO 8601 format. The minimum is 15 minutes and the maximum is 3 days.
+    :type idle_time_before_shutdown: Optional[str]
+    :param idle_time_before_shutdown_minutes: Stops the compute instance after a user-defined period of
+        inactivity, in minutes. The minimum is 15 minutes and the maximum is 3 days.
+    :type idle_time_before_shutdown_minutes: Optional[int]
+ :param enable_node_public_ip: Enable or disable node public IP address provisioning. Defaults to True.
+ Possible values are:
+
+ * True - Indicates that the compute nodes will have public IPs provisioned.
+ * False - Indicates that the compute nodes will have a private endpoint and no public IPs.
+
+ :type enable_node_public_ip: Optional[bool]
+ :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :type setup_scripts: Optional[~azure.ai.ml.entities.SetupScripts]
+ :param custom_applications: List of custom applications and their endpoints for the compute instance.
+ :type custom_applications: Optional[List[~azure.ai.ml.entities.CustomApplications]]
+ :param enable_sso: Enable or disable single sign-on. Defaults to True.
+ :type enable_sso: bool
+ :param enable_root_access: Enable or disable root access. Defaults to True.
+ :type enable_root_access: bool
+ :param release_quota_on_stop: Release quota on stop for the compute instance. Defaults to False.
+ :type release_quota_on_stop: bool
+ :param enable_os_patching: Enable or disable OS patching for the compute instance. Defaults to False.
+ :type enable_os_patching: bool
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START compute_instance]
+ :end-before: [END compute_instance]
+ :language: python
+ :dedent: 8
+ :caption: Creating a ComputeInstance object.
+ """
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ description: Optional[str] = None,
+ size: Optional[str] = None,
+ tags: Optional[dict] = None,
+ ssh_public_access_enabled: Optional[bool] = None,
+ create_on_behalf_of: Optional[AssignedUserConfiguration] = None,
+ network_settings: Optional[NetworkSettings] = None,
+ ssh_settings: Optional[ComputeInstanceSshSettings] = None,
+ schedules: Optional[ComputeSchedules] = None,
+ identity: Optional[IdentityConfiguration] = None,
+ idle_time_before_shutdown: Optional[str] = None,
+ idle_time_before_shutdown_minutes: Optional[int] = None,
+ setup_scripts: Optional[SetupScripts] = None,
+ enable_node_public_ip: bool = True,
+ custom_applications: Optional[List[CustomApplications]] = None,
+ enable_sso: bool = True,
+ enable_root_access: bool = True,
+ release_quota_on_stop: bool = False,
+ enable_os_patching: bool = False,
+ **kwargs: Any,
+ ) -> None:
+ kwargs[TYPE] = ComputeType.COMPUTEINSTANCE
+ self._state: str = kwargs.pop("state", None)
+ self._last_operation: dict = kwargs.pop("last_operation", None)
+ self._os_image_metadata: ImageMetadata = kwargs.pop("os_image_metadata", None)
+ self._services: list = kwargs.pop("services", None)
+ super().__init__(
+ name=name,
+ location=kwargs.pop("location", None),
+ resource_id=kwargs.pop("resource_id", None),
+ description=description,
+ tags=tags,
+ **kwargs,
+ )
+ self.size = size
+ self.ssh_public_access_enabled = ssh_public_access_enabled
+ self.create_on_behalf_of = create_on_behalf_of
+ self.network_settings = network_settings
+ self.ssh_settings = ssh_settings
+ self.schedules = schedules
+ self.identity = identity
+ self.idle_time_before_shutdown = idle_time_before_shutdown
+ self.idle_time_before_shutdown_minutes = idle_time_before_shutdown_minutes
+ self.setup_scripts = setup_scripts
+ self.enable_node_public_ip = enable_node_public_ip
+ self.enable_sso = enable_sso
+ self.enable_root_access = enable_root_access
+ self.release_quota_on_stop = release_quota_on_stop
+ self.enable_os_patching = enable_os_patching
+ self.custom_applications = custom_applications
+ self.subnet = None
+
+ @property
+ def services(self) -> List[Dict[str, str]]:
+ """The compute instance's services.
+
+ :return: The compute instance's services.
+ :rtype: List[Dict[str, str]]
+ """
+ return self._services
+
+ @property
+ def last_operation(self) -> Dict[str, str]:
+ """The last operation.
+
+ :return: The last operation.
+        :rtype: Dict[str, str]
+ """
+ return self._last_operation
+
+ @property
+ def state(self) -> str:
+ """The state of the compute.
+
+ :return: The state of the compute.
+ :rtype: str
+ """
+ return self._state
+
+ @property
+ def os_image_metadata(self) -> ImageMetadata:
+ """Metadata about the operating system image for this compute instance.
+
+ :return: Operating system image metadata.
+ :rtype: ~azure.ai.ml.entities.ImageMetadata
+ """
+ return self._os_image_metadata
+
+ def _to_rest_object(self) -> ComputeResource:
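+        # self.subnet holds the fully qualified subnet ID; it is populated by _set_full_subnet_name().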
+ if self.network_settings and self.network_settings.subnet:
+ subnet_resource = ResourceId(id=self.subnet)
+ else:
+ subnet_resource = None
+
+ ssh_settings = None
+ if self.ssh_public_access_enabled is not None or self.ssh_settings is not None:
+ ssh_settings = CiSShSettings()
+ ssh_settings.ssh_public_access = "Enabled" if self.ssh_public_access_enabled else "Disabled"
+ ssh_settings.admin_public_key = (
+ self.ssh_settings.ssh_key_value if self.ssh_settings and self.ssh_settings.ssh_key_value else None
+ )
+
+ personal_compute_instance_settings = None
+ if self.create_on_behalf_of:
+ personal_compute_instance_settings = PersonalComputeInstanceSettings(
+ assigned_user=AssignedUser(
+ object_id=self.create_on_behalf_of.user_object_id,
+ tenant_id=self.create_on_behalf_of.user_tenant_id,
+ )
+ )
+
+ idle_time_before_shutdown = None
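+        # Prefer the minutes-based setting; it is serialized as an ISO 8601 duration (e.g. 30 -> "PT30M").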
+ if self.idle_time_before_shutdown_minutes:
+ idle_time_before_shutdown = f"PT{self.idle_time_before_shutdown_minutes}M"
+ elif self.idle_time_before_shutdown:
+ warnings.warn(
+ """ The property 'idle_time_before_shutdown' is deprecated.
+ Please use'idle_time_before_shutdown_minutes' instead.""",
+ DeprecationWarning,
+ )
+ idle_time_before_shutdown = self.idle_time_before_shutdown
+
+ compute_instance_prop = ComputeInstanceProperties(
+ vm_size=self.size if self.size else ComputeDefaults.VMSIZE,
+ subnet=subnet_resource,
+ ssh_settings=ssh_settings,
+ personal_compute_instance_settings=personal_compute_instance_settings,
+ idle_time_before_shutdown=idle_time_before_shutdown,
+ enable_node_public_ip=self.enable_node_public_ip,
+ enable_sso=self.enable_sso,
+ enable_root_access=self.enable_root_access,
+ release_quota_on_stop=self.release_quota_on_stop,
+ enable_os_patching=self.enable_os_patching,
+ )
+ compute_instance_prop.schedules = self.schedules._to_rest_object() if self.schedules else None
+ compute_instance_prop.setup_scripts = self.setup_scripts._to_rest_object() if self.setup_scripts else None
+ if self.custom_applications:
+ validate_custom_applications(self.custom_applications)
+ compute_instance_prop.custom_services = []
+ for app in self.custom_applications:
+ compute_instance_prop.custom_services.append(app._to_rest_object())
+ compute_instance = CIRest(
+ description=self.description,
+ compute_type=self.type,
+ properties=compute_instance_prop,
+ )
+ return ComputeResource(
+ location=self.location,
+ properties=compute_instance,
+ identity=(self.identity._to_compute_rest_object() if self.identity else None),
+ tags=self.tags,
+ )
+
+ def _to_dict(self) -> Dict:
+ res: dict = ComputeInstanceSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
+
+ def _set_full_subnet_name(self, subscription_id: str, rg: str) -> None:
+ if self.network_settings and (self.network_settings.vnet_name or self.network_settings.subnet):
+ self.subnet = get_subnet_str(
+ self.network_settings.vnet_name,
+ self.network_settings.subnet,
+ subscription_id,
+ rg,
+ )
+
+ @classmethod
+ def _load_from_rest(cls, rest_obj: ComputeResource) -> "ComputeInstance":
+ prop = rest_obj.properties
+ create_on_behalf_of = None
+ if prop.properties and prop.properties.personal_compute_instance_settings:
+ create_on_behalf_of = AssignedUserConfiguration(
+ user_tenant_id=prop.properties.personal_compute_instance_settings.assigned_user.tenant_id,
+ user_object_id=prop.properties.personal_compute_instance_settings.assigned_user.object_id,
+ )
+ ssh_settings = None
+ if prop.properties and prop.properties.ssh_settings:
+ ssh_settings = ComputeInstanceSshSettings(
+ ssh_key_value=prop.properties.ssh_settings.admin_public_key,
+ ssh_port=prop.properties.ssh_settings.ssh_port,
+ admin_username=prop.properties.ssh_settings.admin_user_name,
+ )
+
+ network_settings = None
+ if prop.properties and (
+ prop.properties.subnet
+ or (
+ prop.properties.connectivity_endpoints
+ and (
+ prop.properties.connectivity_endpoints.private_ip_address
+ or prop.properties.connectivity_endpoints.public_ip_address
+ )
+ )
+ ):
+ network_settings = NetworkSettings(
+ subnet=prop.properties.subnet.id if prop.properties.subnet else None,
+ public_ip_address=(
+ prop.properties.connectivity_endpoints.public_ip_address
+ if prop.properties.connectivity_endpoints
+ and prop.properties.connectivity_endpoints.public_ip_address
+ else None
+ ),
+ private_ip_address=(
+ prop.properties.connectivity_endpoints.private_ip_address
+ if prop.properties.connectivity_endpoints
+ and prop.properties.connectivity_endpoints.private_ip_address
+ else None
+ ),
+ )
+ os_image_metadata = None
+ if prop.properties and prop.properties.os_image_metadata:
+ metadata = prop.properties.os_image_metadata
+ os_image_metadata = ImageMetadata(
+ is_latest_os_image_version=(
+ metadata.is_latest_os_image_version if metadata.is_latest_os_image_version is not None else None
+ ),
+ current_image_version=metadata.current_image_version if metadata.current_image_version else None,
+ latest_image_version=metadata.latest_image_version if metadata.latest_image_version else None,
+ )
+
+ idle_time_before_shutdown = None
+ idle_time_before_shutdown_minutes = None
+ idle_time_before_shutdown_pattern = r"PT([0-9]+)M"
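+        # Only a pure-minutes ISO 8601 duration (e.g. "PT30M") maps back to an integer minute count.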
+ if prop.properties and prop.properties.idle_time_before_shutdown:
+ idle_time_before_shutdown = prop.properties.idle_time_before_shutdown
+ idle_time_match = re.match(
+ pattern=idle_time_before_shutdown_pattern,
+ string=idle_time_before_shutdown,
+ )
+ idle_time_before_shutdown_minutes = int(idle_time_match[1]) if idle_time_match else None
+ custom_applications = None
+ if prop.properties and prop.properties.custom_services:
+ custom_applications = []
+ for app in prop.properties.custom_services:
+ custom_applications.append(CustomApplications._from_rest_object(app))
+ response = ComputeInstance(
+ name=rest_obj.name,
+ id=rest_obj.id,
+ description=prop.description,
+ location=rest_obj.location,
+ resource_id=prop.resource_id,
+ tags=rest_obj.tags if rest_obj.tags else None,
+ provisioning_state=prop.provisioning_state,
+ provisioning_errors=(
+ prop.provisioning_errors[0].error.code
+ if (prop.provisioning_errors and len(prop.provisioning_errors) > 0)
+ else None
+ ),
+ size=prop.properties.vm_size if prop.properties else None,
+ state=prop.properties.state if prop.properties else None,
+ last_operation=(
+ prop.properties.last_operation.as_dict() if prop.properties and prop.properties.last_operation else None
+ ),
+ services=(
+ [app.as_dict() for app in prop.properties.applications]
+ if prop.properties and prop.properties.applications
+ else None
+ ),
+ created_on=(
+ rest_obj.properties.created_on.strftime("%Y-%m-%dT%H:%M:%S.%f%z")
+ if rest_obj.properties and rest_obj.properties.created_on is not None
+ else None
+ ),
+ create_on_behalf_of=create_on_behalf_of,
+ network_settings=network_settings,
+ ssh_settings=ssh_settings,
+ ssh_public_access_enabled=(
+ _ssh_public_access_to_bool(prop.properties.ssh_settings.ssh_public_access)
+ if (prop.properties and prop.properties.ssh_settings and prop.properties.ssh_settings.ssh_public_access)
+ else None
+ ),
+ schedules=(
+ ComputeSchedules._from_rest_object(prop.properties.schedules)
+ if prop.properties and prop.properties.schedules and prop.properties.schedules.compute_start_stop
+ else None
+ ),
+ identity=IdentityConfiguration._from_compute_rest_object(rest_obj.identity) if rest_obj.identity else None,
+ setup_scripts=(
+ SetupScripts._from_rest_object(prop.properties.setup_scripts)
+ if prop.properties and prop.properties.setup_scripts
+ else None
+ ),
+ idle_time_before_shutdown=idle_time_before_shutdown,
+ idle_time_before_shutdown_minutes=idle_time_before_shutdown_minutes,
+ os_image_metadata=os_image_metadata,
+ enable_node_public_ip=(
+ prop.properties.enable_node_public_ip
+ if (prop.properties and prop.properties.enable_node_public_ip is not None)
+ else True
+ ),
+ custom_applications=custom_applications,
+ enable_sso=(
+ prop.properties.enable_sso if (prop.properties and prop.properties.enable_sso is not None) else True
+ ),
+ enable_root_access=(
+ prop.properties.enable_root_access
+ if (prop.properties and prop.properties.enable_root_access is not None)
+ else True
+ ),
+ release_quota_on_stop=(
+ prop.properties.release_quota_on_stop
+ if (prop.properties and prop.properties.release_quota_on_stop is not None)
+ else False
+ ),
+ enable_os_patching=(
+ prop.properties.enable_os_patching
+ if (prop.properties and prop.properties.enable_os_patching is not None)
+ else False
+ ),
+ )
+ return response
+
+ @classmethod
+ def _load_from_dict(cls, data: Dict, context: Dict, **kwargs: Any) -> "ComputeInstance":
+ loaded_data = load_from_dict(ComputeInstanceSchema, data, context, **kwargs)
+ return ComputeInstance(**loaded_data)
+
+
+def _ssh_public_access_to_bool(value: str) -> Optional[bool]:
+ if value.lower() == "disabled":
+ return False
+ if value.lower() == "enabled":
+ return True
+ return None
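+
+
+# Editorial sketch (not part of the SDK source): an illustrative combination of the classes in this
+# module. All names, IDs, and sizes are placeholders, and the helper name below is hypothetical.
+def _example_compute_instance() -> ComputeInstance:
+    on_behalf_of = AssignedUserConfiguration(
+        user_tenant_id="<tenant-id>",
+        user_object_id="<user-object-id>",
+    )
+    return ComputeInstance(
+        name="example-ci",
+        size="Standard_DS3_v2",  # placeholder VM size
+        create_on_behalf_of=on_behalf_of,
+        ssh_public_access_enabled=True,
+        idle_time_before_shutdown_minutes=30,  # serialized to the REST API as "PT30M"
+        enable_node_public_ip=True,
+    )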
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/kubernetes_compute.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/kubernetes_compute.py
new file mode 100644
index 00000000..bc8c2c28
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/kubernetes_compute.py
@@ -0,0 +1,105 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+from typing import Any, Dict, Optional
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ComputeResource, Kubernetes, KubernetesProperties
+from azure.ai.ml._schema.compute.kubernetes_compute import KubernetesComputeSchema
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, TYPE
+from azure.ai.ml.constants._compute import ComputeType
+from azure.ai.ml.entities._compute.compute import Compute
+from azure.ai.ml.entities._credentials import IdentityConfiguration
+from azure.ai.ml.entities._util import load_from_dict
+
+
+class KubernetesCompute(Compute):
+ """Kubernetes Compute resource.
+
+ :param namespace: The namespace of the KubernetesCompute. Defaults to "default".
+ :type namespace: Optional[str]
+ :param properties: The properties of the Kubernetes compute resource.
+ :type properties: Optional[Dict]
+ :param identity: The identities that are associated with the compute cluster.
+ :type identity: ~azure.ai.ml.entities.IdentityConfiguration
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START kubernetes_compute]
+ :end-before: [END kubernetes_compute]
+ :language: python
+ :dedent: 8
+ :caption: Creating a KubernetesCompute object.
+ """
+
+ def __init__(
+ self,
+ *,
+ namespace: str = "default",
+ properties: Optional[Dict[str, Any]] = None,
+ identity: Optional[IdentityConfiguration] = None,
+ **kwargs: Any,
+ ) -> None:
+ kwargs[TYPE] = ComputeType.KUBERNETES
+ super().__init__(**kwargs)
+ self.namespace = namespace
+ self.properties = properties if properties else {}
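+        # If a nested REST-style "properties" dict was provided, keep its namespace in sync with this compute.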
+ if "properties" in self.properties:
+ self.properties["properties"]["namespace"] = namespace
+ self.identity = identity
+
+ @classmethod
+ def _load_from_rest(cls, rest_obj: ComputeResource) -> "KubernetesCompute":
+ prop = rest_obj.properties
+ return KubernetesCompute(
+ name=rest_obj.name,
+ id=rest_obj.id,
+ description=prop.description,
+ location=rest_obj.location,
+ resource_id=prop.resource_id,
+ tags=rest_obj.tags if rest_obj.tags else None,
+ provisioning_state=prop.provisioning_state,
+ provisioning_errors=(
+ prop.provisioning_errors[0].error.code
+ if (prop.provisioning_errors and len(prop.provisioning_errors) > 0)
+ else None
+ ),
+ created_on=prop.additional_properties.get("createdOn", None),
+ properties=prop.properties.as_dict() if prop.properties else None,
+ namespace=prop.properties.namespace,
+ identity=IdentityConfiguration._from_compute_rest_object(rest_obj.identity) if rest_obj.identity else None,
+ )
+
+ def _to_dict(self) -> Dict:
+ res: dict = KubernetesComputeSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
+
+ @classmethod
+ def _load_from_dict(cls, data: Dict, context: Dict, **kwargs: Any) -> "KubernetesCompute":
+ if not data:
+ data = {"namespace": "default"}
+ if "namespace" not in data:
+ data["namespace"] = "default"
+
+ loaded_data = load_from_dict(KubernetesComputeSchema, data, context, **kwargs)
+ return KubernetesCompute(**loaded_data)
+
+ def _to_rest_object(self) -> ComputeResource:
+ kubernetes_prop = KubernetesProperties.from_dict(self.properties)
+ kubernetes_prop.namespace = self.namespace
+ kubernetes_comp = Kubernetes(
+ resource_id=self.resource_id,
+ compute_location=self.location,
+ description=self.description,
+ properties=kubernetes_prop,
+ )
+ return ComputeResource(
+ location=self.location,
+ properties=kubernetes_comp,
+ name=self.name,
+ identity=(self.identity._to_compute_rest_object() if self.identity else None),
+ tags=self.tags,
+ )
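+
+
+# Editorial sketch (not part of the SDK source): an illustrative attach of an existing cluster.
+# The resource ID is a placeholder for an Arc-connected (or AKS) cluster; name and resource_id are
+# forwarded to the base Compute class through **kwargs.
+def _example_kubernetes_compute() -> KubernetesCompute:
+    return KubernetesCompute(
+        name="example-k8s",
+        namespace="default",
+        resource_id=(
+            "/subscriptions/<subscription-id>/resourceGroups/<rg>"
+            "/providers/Microsoft.Kubernetes/connectedClusters/<cluster-name>"
+        ),
+    )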
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/synapsespark_compute.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/synapsespark_compute.py
new file mode 100644
index 00000000..99b366cb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/synapsespark_compute.py
@@ -0,0 +1,234 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from typing import Any, Dict, Optional
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import (
+ AutoPauseProperties,
+ AutoScaleProperties,
+ ComputeResource,
+ SynapseSpark,
+)
+from azure.ai.ml._schema.compute.synapsespark_compute import SynapseSparkComputeSchema
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, TYPE
+from azure.ai.ml.constants._compute import ComputeType
+from azure.ai.ml.entities import Compute
+from azure.ai.ml.entities._credentials import IdentityConfiguration
+from azure.ai.ml.entities._util import load_from_dict
+
+
+class AutoScaleSettings:
+ """Auto-scale settings for Synapse Spark compute.
+
+ :keyword min_node_count: The minimum compute node count.
+ :paramtype min_node_count: Optional[int]
+ :keyword max_node_count: The maximum compute node count.
+ :paramtype max_node_count: Optional[int]
+ :keyword enabled: Specifies if auto-scale is enabled.
+ :paramtype enabled: Optional[bool]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_spark_configurations.py
+ :start-after: [START synapse_spark_compute_configuration]
+ :end-before: [END synapse_spark_compute_configuration]
+ :language: python
+ :dedent: 8
+ :caption: Configuring AutoScaleSettings on SynapseSparkCompute.
+ """
+
+ def __init__(
+ self,
+ *,
+ min_node_count: Optional[int] = None,
+ max_node_count: Optional[int] = None,
+ enabled: Optional[bool] = None,
+ ) -> None:
+ self.min_node_count = min_node_count
+ self.max_node_count = max_node_count
+ self.auto_scale_enabled = enabled
+
+ def _to_auto_scale_settings(self) -> AutoScaleProperties:
+ return AutoScaleProperties(
+ min_node_count=self.min_node_count,
+ max_node_count=self.max_node_count,
+ auto_scale_enabled=self.auto_scale_enabled,
+ )
+
+ @classmethod
+ def _from_auto_scale_settings(cls, autoscaleprops: AutoScaleProperties) -> "AutoScaleSettings":
+ return cls(
+ min_node_count=autoscaleprops.min_node_count,
+ max_node_count=autoscaleprops.max_node_count,
+ enabled=autoscaleprops.enabled,
+ )
+
+
+class AutoPauseSettings:
+ """Auto pause settings for Synapse Spark compute.
+
+ :keyword delay_in_minutes: The time delay in minutes before pausing cluster.
+ :paramtype delay_in_minutes: Optional[int]
+ :keyword enabled: Specifies if auto-pause is enabled.
+ :paramtype enabled: Optional[bool]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_spark_configurations.py
+ :start-after: [START synapse_spark_compute_configuration]
+ :end-before: [END synapse_spark_compute_configuration]
+ :language: python
+ :dedent: 8
+ :caption: Configuring AutoPauseSettings on SynapseSparkCompute.
+ """
+
+ def __init__(self, *, delay_in_minutes: Optional[int] = None, enabled: Optional[bool] = None) -> None:
+ self.delay_in_minutes = delay_in_minutes
+ self.auto_pause_enabled = enabled
+
+ def _to_auto_pause_settings(self) -> AutoPauseProperties:
+ return AutoPauseProperties(
+ delay_in_minutes=self.delay_in_minutes,
+ auto_pause_enabled=self.auto_pause_enabled,
+ )
+
+ @classmethod
+ def _from_auto_pause_settings(cls, autopauseprops: AutoPauseProperties) -> "AutoPauseSettings":
+ return cls(
+ delay_in_minutes=autopauseprops.delay_in_minutes,
+ enabled=autopauseprops.enabled,
+ )
+
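+
+# Editorial sketch (not part of the SDK source): pairing the two settings classes defined above.
+# All values are placeholders.
+def _example_synapse_spark_settings():
+    scale = AutoScaleSettings(min_node_count=1, max_node_count=3, enabled=True)
+    pause = AutoPauseSettings(delay_in_minutes=15, enabled=True)
+    return scale, pause
+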
+
+@experimental
+class SynapseSparkCompute(Compute):
+ """SynapseSpark Compute resource.
+
+ :keyword name: The name of the compute.
+ :paramtype name: str
+ :keyword description: The description of the resource. Defaults to None.
+ :paramtype description: Optional[str]
+ :keyword tags: The set of resource tags defined as key/value pairs. Defaults to None.
+    :paramtype tags: Optional[dict[str, str]]
+ :keyword node_count: The number of nodes in the compute.
+ :paramtype node_count: Optional[int]
+ :keyword node_family: The node family of the compute.
+ :paramtype node_family: Optional[str]
+ :keyword node_size: The size of the node.
+ :paramtype node_size: Optional[str]
+ :keyword spark_version: The version of Spark to use.
+ :paramtype spark_version: Optional[str]
+ :keyword identity: The configuration of identities that are associated with the compute cluster.
+ :paramtype identity: Optional[~azure.ai.ml.entities.IdentityConfiguration]
+ :keyword scale_settings: The scale settings for the compute.
+ :paramtype scale_settings: Optional[~azure.ai.ml.entities.AutoScaleSettings]
+ :keyword auto_pause_settings: The auto pause settings for the compute.
+ :paramtype auto_pause_settings: Optional[~azure.ai.ml.entities.AutoPauseSettings]
+ :keyword kwargs: Additional keyword arguments passed to the parent class.
+ :paramtype kwargs: Optional[dict]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_spark_configurations.py
+ :start-after: [START synapse_spark_compute_configuration]
+ :end-before: [END synapse_spark_compute_configuration]
+ :language: python
+ :dedent: 8
+ :caption: Creating Synapse Spark compute.
+ """
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ description: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ node_count: Optional[int] = None,
+ node_family: Optional[str] = None,
+ node_size: Optional[str] = None,
+ spark_version: Optional[str] = None,
+ identity: Optional[IdentityConfiguration] = None,
+ scale_settings: Optional[AutoScaleSettings] = None,
+ auto_pause_settings: Optional[AutoPauseSettings] = None,
+ **kwargs: Any,
+ ) -> None:
+ kwargs[TYPE] = ComputeType.SYNAPSESPARK
+ super().__init__(name=name, description=description, location=kwargs.pop("location", None), tags=tags, **kwargs)
+ self.identity = identity
+ self.node_count = node_count
+ self.node_family = node_family
+ self.node_size = node_size
+ self.spark_version = spark_version
+ self.scale_settings = scale_settings
+ self.auto_pause_settings = auto_pause_settings
+
+ @classmethod
+ def _load_from_rest(cls, rest_obj: ComputeResource) -> "SynapseSparkCompute":
+ prop = rest_obj.properties
+ scale_settings = (
+ # pylint: disable=protected-access
+ AutoScaleSettings._from_auto_scale_settings(prop.properties.auto_scale_properties)
+ if prop.properties.auto_scale_properties
+ else None
+ )
+
+ auto_pause_settings = (
+ # pylint: disable=protected-access
+ AutoPauseSettings._from_auto_pause_settings(prop.properties.auto_pause_properties)
+ if prop.properties.auto_pause_properties
+ else None
+ )
+
+ return SynapseSparkCompute(
+ name=rest_obj.name,
+ id=rest_obj.id,
+ description=prop.description,
+ location=rest_obj.location,
+ resource_id=prop.resource_id,
+ tags=rest_obj.tags if rest_obj.tags else None,
+ created_on=prop.created_on if prop.properties else None,
+ node_count=prop.properties.node_count if prop.properties else None,
+ node_family=prop.properties.node_size_family if prop.properties else None,
+ node_size=prop.properties.node_size if prop.properties else None,
+ spark_version=prop.properties.spark_version if prop.properties else None,
+ # pylint: disable=protected-access
+ identity=IdentityConfiguration._from_compute_rest_object(rest_obj.identity) if rest_obj.identity else None,
+ scale_settings=scale_settings,
+ auto_pause_settings=auto_pause_settings,
+ provisioning_state=prop.provisioning_state,
+ provisioning_errors=(
+ prop.provisioning_errors[0].error.code
+ if (prop.provisioning_errors and len(prop.provisioning_errors) > 0)
+ else None
+ ),
+ )
+
+ def _to_dict(self) -> Dict:
+ res: dict = SynapseSparkComputeSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
+
+ @classmethod
+ def _load_from_dict(cls, data: Dict, context: Dict, **kwargs: Any) -> "SynapseSparkCompute":
+ loaded_data = load_from_dict(SynapseSparkComputeSchema, data, context, **kwargs)
+ return SynapseSparkCompute(**loaded_data)
+
+ def _to_rest_object(self) -> ComputeResource:
+ synapsespark_comp = SynapseSpark(
+ name=self.name,
+ compute_type=self.type,
+ resource_id=self.resource_id,
+ description=self.description,
+ )
+ return ComputeResource(
+ location=self.location,
+ properties=synapsespark_comp,
+ name=self.name,
+ identity=(
+ # pylint: disable=protected-access
+ self.identity._to_compute_rest_object()
+ if self.identity
+ else None
+ ),
+ tags=self.tags,
+ )
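+
+
+# Editorial sketch (not part of the SDK source): an illustrative SynapseSparkCompute that accepts
+# the settings objects defined above. Node family, size, and Spark version are placeholders.
+def _example_synapse_spark_compute(
+    scale_settings: AutoScaleSettings,
+    auto_pause_settings: AutoPauseSettings,
+) -> SynapseSparkCompute:
+    return SynapseSparkCompute(
+        name="example-synapse",
+        node_count=2,
+        node_family="MemoryOptimized",
+        node_size="Small",
+        spark_version="3.4",
+        scale_settings=scale_settings,
+        auto_pause_settings=auto_pause_settings,
+    )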
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/unsupported_compute.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/unsupported_compute.py
new file mode 100644
index 00000000..258fbf6b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/unsupported_compute.py
@@ -0,0 +1,62 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from typing import Any, Dict
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ComputeResource
+from azure.ai.ml.constants._common import TYPE
+from azure.ai.ml.entities._compute.compute import Compute
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationException
+
+
+class UnsupportedCompute(Compute):
+ """Unsupported compute resource.
+
+ Only used for displaying compute properties for resources not fully supported in the SDK.
+ """
+
+ def __init__(
+ self,
+ **kwargs: Any,
+ ) -> None:
+ kwargs[TYPE] = "*** Unsupported Compute Type ***"
+ super().__init__(**kwargs)
+
+ @classmethod
+ def _load_from_rest(cls, rest_obj: ComputeResource) -> "UnsupportedCompute":
+ prop = rest_obj.properties
+ if hasattr(rest_obj, "tags"):
+            # TODO(2294131): remove this once the issue where the DataFactory object has no tags is fixed
+ tags = rest_obj.tags
+ else:
+ tags = None
+ response = UnsupportedCompute(
+ name=rest_obj.name,
+ id=rest_obj.id,
+ description=prop.description,
+ location=rest_obj.location,
+ resource_id=prop.resource_id,
+ tags=tags,
+ provisioning_state=prop.provisioning_state,
+ created_on=prop.additional_properties.get("createdOn", None),
+ )
+ return response
+
+ @classmethod
+ def _load_from_dict(cls, data: Dict, context: Dict, **kwargs: Any) -> "UnsupportedCompute":
+ msg = "Cannot create unsupported compute type."
+ raise ValidationException(
+ message=msg,
+ target=ErrorTarget.COMPUTE,
+ no_personal_data_message=msg,
+ error_category=ErrorCategory.USER_ERROR,
+ )
+
+ def _to_rest_object(self) -> ComputeResource:
+ msg = "Cannot create unsupported compute type."
+ raise ValidationException(
+ message=msg,
+ target=ErrorTarget.COMPUTE,
+ no_personal_data_message=msg,
+ error_category=ErrorCategory.USER_ERROR,
+ )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/virtual_machine_compute.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/virtual_machine_compute.py
new file mode 100644
index 00000000..90c3ec63
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_compute/virtual_machine_compute.py
@@ -0,0 +1,172 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import ComputeResource
+from azure.ai.ml._restclient.v2022_10_01_preview.models import VirtualMachine as VMResource
+from azure.ai.ml._restclient.v2022_10_01_preview.models import (
+ VirtualMachineSchemaProperties,
+ VirtualMachineSshCredentials,
+)
+from azure.ai.ml._schema.compute.virtual_machine_compute import VirtualMachineComputeSchema
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, TYPE, DefaultOpenEncoding
+from azure.ai.ml.constants._compute import ComputeType
+from azure.ai.ml.entities._compute.compute import Compute
+from azure.ai.ml.entities._util import load_from_dict
+
+
+class VirtualMachineSshSettings:
+ """SSH settings for a virtual machine.
+
+    :param admin_username: The admin user name.
+    :type admin_username: Optional[str]
+ :param admin_password: The admin user password. Defaults to None.
+ Required if `ssh_private_key_file` is not specified.
+ :type admin_password: Optional[str]
+ :param ssh_port: The ssh port number. Default is 22.
+ :type ssh_port: int
+    :param ssh_private_key_file: Path to the file containing the SSH RSA private key.
+        Use "ssh-keygen -t rsa -b 2048" to generate your SSH key pair.
+        Required if `admin_password` is not specified.
+ :type ssh_private_key_file: Optional[str]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START vm_ssh_settings]
+ :end-before: [END vm_ssh_settings]
+ :language: python
+ :dedent: 8
+ :caption: Configuring a VirtualMachineSshSettings object.
+ """
+
+ def __init__(
+ self,
+ *,
+ admin_username: Optional[str],
+ admin_password: Optional[str] = None,
+ ssh_port: Optional[int] = 22,
+ ssh_private_key_file: Optional[str] = None,
+ ) -> None:
+ self.admin_username = admin_username
+ self.admin_password = admin_password
+ self.ssh_port = ssh_port
+ self.ssh_private_key_file = ssh_private_key_file
+
+
+class VirtualMachineCompute(Compute):
+ """Virtual Machine Compute resource.
+
+ :param name: Name of the compute resource.
+ :type name: str
+ :param description: Description of the resource. Defaults to None.
+ :type description: Optional[str]
+ :param resource_id: ARM resource ID of the underlying compute resource.
+ :type resource_id: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: Optional[dict]
+ :param ssh_settings: SSH settings. Defaults to None.
+ :type ssh_settings: Optional[~azure.ai.ml.entities.VirtualMachineSshSettings]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/ml_samples_compute.py
+ :start-after: [START vm_compute]
+ :end-before: [END vm_compute]
+ :language: python
+ :dedent: 8
+ :caption: Configuring a VirtualMachineCompute object.
+ """
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ description: Optional[str] = None,
+ resource_id: str,
+ tags: Optional[dict] = None,
+ ssh_settings: Optional[VirtualMachineSshSettings] = None,
+ **kwargs: Any,
+ ) -> None:
+ kwargs[TYPE] = ComputeType.VIRTUALMACHINE
+ self._public_key_data: str = kwargs.pop("public_key_data", None)
+ super().__init__(
+ name=name,
+ location=kwargs.pop("location", None),
+ description=description,
+ resource_id=resource_id,
+ tags=tags,
+ **kwargs,
+ )
+ self.ssh_settings = ssh_settings
+
+ @property
+ def public_key_data(self) -> str:
+ """Public key data.
+
+ :return: Public key data.
+ :rtype: str
+ """
+ return self._public_key_data
+
+ @classmethod
+ def _load_from_rest(cls, rest_obj: ComputeResource) -> "VirtualMachineCompute":
+ prop = rest_obj.properties
+ credentials = prop.properties.administrator_account if prop.properties else None
+ ssh_settings_param = None
+ if credentials or (prop.properties and prop.properties.ssh_port):
+ ssh_settings_param = VirtualMachineSshSettings(
+ admin_username=credentials.username if credentials else None,
+ admin_password=credentials.password if credentials else None,
+ ssh_port=prop.properties.ssh_port if prop.properties else None,
+ )
+ response = VirtualMachineCompute(
+ name=rest_obj.name,
+ id=rest_obj.id,
+ description=prop.description,
+ location=rest_obj.location,
+ resource_id=prop.resource_id,
+ tags=rest_obj.tags if rest_obj.tags else None,
+ public_key_data=credentials.public_key_data if credentials else None,
+ provisioning_state=prop.provisioning_state,
+ provisioning_errors=(
+ prop.provisioning_errors[0].error.code
+ if (prop.provisioning_errors and len(prop.provisioning_errors) > 0)
+ else None
+ ),
+ ssh_settings=ssh_settings_param,
+ )
+ return response
+
+ def _to_dict(self) -> Dict:
+ res: dict = VirtualMachineComputeSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+ return res
+
+ @classmethod
+ def _load_from_dict(cls, data: Dict, context: Dict, **kwargs: Any) -> "VirtualMachineCompute":
+ loaded_data = load_from_dict(VirtualMachineComputeSchema, data, context, **kwargs)
+ return VirtualMachineCompute(**loaded_data)
+
+ def _to_rest_object(self) -> ComputeResource:
+ ssh_key_value = None
+ if self.ssh_settings and self.ssh_settings.ssh_private_key_file:
+ ssh_key_value = Path(self.ssh_settings.ssh_private_key_file).read_text(encoding=DefaultOpenEncoding.READ)
+ credentials = VirtualMachineSshCredentials(
+ username=self.ssh_settings.admin_username if self.ssh_settings else None,
+ password=self.ssh_settings.admin_password if self.ssh_settings else None,
+ public_key_data=self.public_key_data,
+ private_key_data=ssh_key_value,
+ )
+ if self.ssh_settings is not None:
+ properties = VirtualMachineSchemaProperties(
+ ssh_port=self.ssh_settings.ssh_port, administrator_account=credentials
+ )
+ vm_compute = VMResource(
+ properties=properties, # pylint: disable=possibly-used-before-assignment
+ resource_id=self.resource_id,
+ description=self.description,
+ )
+ resource = ComputeResource(name=self.name, location=self.location, tags=self.tags, properties=vm_compute)
+ return resource
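+
+
+# Editorial sketch (not part of the SDK source): attaching an existing virtual machine. The
+# resource ID, username, and key-file path are placeholders; the private key file is read and sent
+# as private_key_data when the compute is created.
+def _example_virtual_machine_compute() -> VirtualMachineCompute:
+    ssh = VirtualMachineSshSettings(
+        admin_username="azureuser",
+        ssh_port=22,
+        ssh_private_key_file="path/to/ssh_private_key",
+    )
+    return VirtualMachineCompute(
+        name="example-vm",
+        resource_id=(
+            "/subscriptions/<subscription-id>/resourceGroups/<rg>"
+            "/providers/Microsoft.Compute/virtualMachines/<vm-name>"
+        ),
+        ssh_settings=ssh,
+    )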