Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/__init__.py  17
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/__init__.py  5
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/__init__.py  5
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/base_environment_source.py  48
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/inferencing_server.py  216
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/model_configuration.py  55
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/model_package.py  338
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/artifact.py  131
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/code.py  142
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/data.py  237
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/feature_set.py  220
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/index.py  137
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/model.py  219
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/asset.py  145
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/auto_delete_setting.py  42
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/environment.py  478
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/federated_learning_silo.py  123
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/intellectual_property.py  49
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/workspace_asset_reference.py  87
19 files changed, 2694 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/__init__.py
new file mode 100644
index 00000000..5ee0f971
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/__init__.py
@@ -0,0 +1,17 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
+
+
+from ._artifacts.artifact import Artifact
+from ._artifacts.code import Code
+from ._artifacts.data import Data
+from ._artifacts.index import Index
+from ._artifacts.model import Model
+from .environment import Environment
+from ._artifacts._package.model_package import ModelPackage
+from .workspace_asset_reference import WorkspaceAssetReference
+
+__all__ = ["Artifact", "Model", "Code", "Data", "Index", "Environment", "WorkspaceAssetReference", "ModelPackage"]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/__init__.py
new file mode 100644
index 00000000..fdf8caba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/__init__.py
@@ -0,0 +1,5 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/__init__.py
new file mode 100644
index 00000000..fdf8caba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/__init__.py
@@ -0,0 +1,5 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/base_environment_source.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/base_environment_source.py
new file mode 100644
index 00000000..1be67144
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/base_environment_source.py
@@ -0,0 +1,48 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=redefined-builtin
+
+from typing import Dict, Optional
+
+from azure.ai.ml._restclient.v2023_08_01_preview.models import BaseEnvironmentId as RestBaseEnvironmentId
+from azure.ai.ml._schema.assets.package.base_environment_source import BaseEnvironmentSourceSchema
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY
+
+
+@experimental
+class BaseEnvironment:
+    """Base environment type.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: The type of the base environment.
+    :type type: str
+    :param resource_id: The resource ID of the base environment, e.g. "azureml:name:version".
+    :type resource_id: str
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_misc.py
+            :start-after: [START base_env_entity_create]
+            :end-before: [END base_env_entity_create]
+            :language: python
+            :dedent: 8
+            :caption: Create a Base Environment object.
+    """
+
+    def __init__(self, type: str, resource_id: Optional[str] = None):
+        self.type = type
+        self.resource_id = resource_id
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestBaseEnvironmentId) -> "BaseEnvironment":
+        return BaseEnvironment(type=rest_obj.base_environment_source_type, resource_id=rest_obj.resource_id)
+
+    def _to_dict(self) -> Dict:
+        return dict(BaseEnvironmentSourceSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self))
+
+    def _to_rest_object(self) -> RestBaseEnvironmentId:
+        return RestBaseEnvironmentId(base_environment_source_type=self.type, resource_id=self.resource_id)
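A minimal usage sketch for the BaseEnvironment class defined above, assuming azure-ai-ml is installed; the source type string and resource ID are illustrative values, not confirmed constants:

    from azure.ai.ml.entities._assets._artifacts._package.base_environment_source import BaseEnvironment

    # Point a packaging operation at an already-registered environment.
    base_env = BaseEnvironment(type="EnvironmentAsset", resource_id="azureml:my-base-env:1")
    rest_payload = base_env._to_rest_object()  # RestBaseEnvironmentId carrying the same two fields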
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/inferencing_server.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/inferencing_server.py
new file mode 100644
index 00000000..6e685244
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/inferencing_server.py
@@ -0,0 +1,216 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access,unused-argument
+
+from typing import Any, Optional
+
+from azure.ai.ml._restclient.v2023_02_01_preview.models import (
+    AzureMLOnlineInferencingServer as RestAzureMLOnlineInferencingServer,
+)
+from azure.ai.ml._restclient.v2023_02_01_preview.models import CustomInferencingServer as RestCustomInferencingServer
+from azure.ai.ml._restclient.v2023_02_01_preview.models import (
+    OnlineInferenceConfiguration as RestOnlineInferenceConfiguration,
+)
+from azure.ai.ml._restclient.v2023_02_01_preview.models import Route as RestRoute
+from azure.ai.ml._restclient.v2023_02_01_preview.models import TritonInferencingServer as RestTritonInferencingServer
+from azure.ai.ml._restclient.v2023_08_01_preview.models import (
+    AzureMLBatchInferencingServer as RestAzureMLBatchInferencingServer,
+)
+from azure.ai.ml._restclient.v2023_08_01_preview.models import (
+    AzureMLOnlineInferencingServer as RestAzureMLOnlineInferencingServer,
+)
+from azure.ai.ml._utils._experimental import experimental
+
+from ...._deployment.code_configuration import CodeConfiguration
+
+
+@experimental
+class AzureMLOnlineInferencingServer:
+    """Azure ML online inferencing configurations.
+
+    :param code_configuration: The code configuration of the inferencing server.
+    :type code_configuration: azure.ai.ml.entities.CodeConfiguration
+    :ivar type: The type of the inferencing server.
+    """
+
+    def __init__(self, *, code_configuration: Optional[CodeConfiguration] = None, **kwargs: Any):
+        self.type = "azureml_online"
+        self.code_configuration = code_configuration
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestAzureMLOnlineInferencingServer) -> "AzureMLOnlineInferencingServer":
+        return AzureMLOnlineInferencingServer(type=rest_obj.server_type, code_configuration=rest_obj.code_configuration)
+
+    def _to_rest_object(self) -> RestAzureMLOnlineInferencingServer:
+        return RestAzureMLOnlineInferencingServer(server_type=self.type, code_configuration=self.code_configuration)
+
+
+@experimental
+class AzureMLBatchInferencingServer:
+    """Azure ML batch inferencing configurations.
+
+    :param code_configuration: The code configuration of the inferencing server.
+    :type code_configuration: azure.ai.ml.entities.CodeConfiguration
+    :ivar type: The type of the inferencing server.
+    """
+
+    def __init__(self, *, code_configuration: Optional[CodeConfiguration] = None, **kwargs: Any):
+        self.type = "azureml_batch"
+        self.code_configuration = code_configuration
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestAzureMLBatchInferencingServer) -> "AzureMLBatchInferencingServer":
+        return AzureMLBatchInferencingServer(code_configuration=rest_obj.code_configuration)
+
+    def _to_rest_object(self) -> RestAzureMLBatchInferencingServer:
+        return RestAzureMLBatchInferencingServer(server_type=self.type, code_configuration=self.code_configuration)
+
+
+@experimental
+class TritonInferencingServer:
+    """Azure ML triton inferencing configurations.
+
+    :param inference_configuration: The inference configuration of the inferencing server.
+    :type inference_configuration: azure.ai.ml.entities.CodeConfiguration
+    :ivar type: The type of the inferencing server.
+    """
+
+    def __init__(self, *, inference_configuration: Optional[CodeConfiguration] = None, **kwargs: Any):
+        self.type = "triton"
+        self.inference_configuration = inference_configuration
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestTritonInferencingServer) -> "TritonInferencingServer":
+        return TritonInferencingServer(
+            type=rest_obj.server_type, inference_configuration=rest_obj.inference_configuration
+        )
+
+    def _to_rest_object(self) -> RestTritonInferencingServer:
+        return RestCustomInferencingServer(server_type=self.type, inference_configuration=self.inference_configuration)
+
+
+@experimental
+class Route:
+    """Route.
+
+    :param port: The port of the route.
+    :type port: str
+    :param path: The path of the route.
+    :type path: str
+    """
+
+    def __init__(self, *, port: Optional[str] = None, path: Optional[str] = None):
+        self.port = port
+        self.path = path
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestRoute) -> "Route":
+        return Route(port=rest_obj.port, path=rest_obj.path)
+
+    def _to_rest_object(self) -> Optional[RestRoute]:
+        return RestRoute(port=self.port, path=self.path)
+
+
+@experimental
+class OnlineInferenceConfiguration:
+    """Online inference configurations.
+
+    :param liveness_route: The liveness route of the online inference configuration.
+    :type liveness_route: Route
+    :param readiness_route: The readiness route of the online inference configuration.
+    :type readiness_route: Route
+    :param scoring_route: The scoring route of the online inference configuration.
+    :type scoring_route: Route
+    :param entry_script: The entry script of the online inference configuration.
+    :type entry_script: str
+    :param configuration: The configuration of the online inference configuration.
+    :type configuration: dict
+    """
+
+    def __init__(
+        self,
+        liveness_route: Optional[Route] = None,
+        readiness_route: Optional[Route] = None,
+        scoring_route: Optional[Route] = None,
+        entry_script: Optional[str] = None,
+        configuration: Optional[dict] = None,
+    ):
+        self.liveness_route = liveness_route
+        self.readiness_route = readiness_route
+        self.scoring_route = scoring_route
+        self.entry_script = entry_script
+        self.configuration = configuration
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestOnlineInferenceConfiguration) -> "OnlineInferenceConfiguration":
+        return OnlineInferenceConfiguration(
+            liveness_route=Route._from_rest_object(rest_obj.liveness_route),
+            readiness_route=Route._from_rest_object(rest_obj.readiness_route),
+            scoring_route=Route._from_rest_object(rest_obj.scoring_route),
+            entry_script=rest_obj.entry_script,
+            configuration=rest_obj.configuration,
+        )
+
+    def _to_rest_object(self) -> RestOnlineInferenceConfiguration:
+        if self.liveness_route is not None and self.readiness_route is not None and self.scoring_route is not None:
+            return RestOnlineInferenceConfiguration(
+                liveness_route=self.liveness_route._to_rest_object(),
+                readiness_route=self.readiness_route._to_rest_object(),
+                scoring_route=self.scoring_route._to_rest_object(),
+                entry_script=self.entry_script,
+                configuration=self.configuration,
+            )
+
+        if self.liveness_route is None:
+            return RestOnlineInferenceConfiguration(
+                readiness_route=self.readiness_route._to_rest_object() if self.readiness_route is not None else None,
+                scoring_route=self.scoring_route._to_rest_object() if self.scoring_route is not None else None,
+                entry_script=self.entry_script,
+                configuration=self.configuration,
+            )
+
+        if self.readiness_route is None:
+            return RestOnlineInferenceConfiguration(
+                liveness_route=self.liveness_route._to_rest_object(),
+                scoring_route=self.scoring_route._to_rest_object() if self.scoring_route is not None else None,
+                entry_script=self.entry_script,
+                configuration=self.configuration,
+            )
+
+        if self.scoring_route is None:
+            return RestOnlineInferenceConfiguration(
+                liveness_route=self.liveness_route._to_rest_object(),
+                readiness_route=self.readiness_route._to_rest_object(),
+                entry_script=self.entry_script,
+                configuration=self.configuration,
+            )
+
+        return RestOnlineInferenceConfiguration(
+            entry_script=self.entry_script,
+            configuration=self.configuration,
+        )
+
+
+@experimental
+class CustomInferencingServer:
+    """Custom inferencing configurations.
+
+    :param inference_configuration: The inference configuration of the inferencing server.
+    :type inference_configuration: OnlineInferenceConfiguration
+    :ivar type: The type of the inferencing server.
+    """
+
+    def __init__(self, *, inference_configuration: Optional[OnlineInferenceConfiguration] = None, **kwargs: Any):
+        self.type = "custom"
+        self.inference_configuration = inference_configuration
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestCustomInferencingServer) -> "CustomInferencingServer":
+        return CustomInferencingServer(
+            type=rest_obj.server_type, inference_configuration=rest_obj.inference_configuration
+        )
+
+    def _to_rest_object(self) -> RestCustomInferencingServer:
+        return RestCustomInferencingServer(server_type=self.type, inference_configuration=self.inference_configuration)
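A sketch tying together the route and server classes above for a custom serving container, assuming azure-ai-ml is installed; the port, paths, and script name are illustrative:

    from azure.ai.ml.entities._assets._artifacts._package.inferencing_server import (
        CustomInferencingServer,
        OnlineInferenceConfiguration,
        Route,
    )

    # Describe the HTTP surface exposed by the custom container.
    inference_config = OnlineInferenceConfiguration(
        liveness_route=Route(port="8080", path="/health"),
        readiness_route=Route(port="8080", path="/ready"),
        scoring_route=Route(port="8080", path="/score"),
        entry_script="score.py",
    )
    server = CustomInferencingServer(inference_configuration=inference_config)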
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/model_configuration.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/model_configuration.py
new file mode 100644
index 00000000..73c777cf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/model_configuration.py
@@ -0,0 +1,55 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ----------------------------------------------------------
+
+
+from typing import Optional
+
+from azure.ai.ml._exception_helper import log_and_raise_error
+from azure.ai.ml._restclient.v2023_04_01_preview.models import ModelConfiguration as RestModelConfiguration
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationErrorType, ValidationException
+
+
+@experimental
+class ModelConfiguration:
+    """ModelConfiguration.
+
+    :param mode: The mode of the model. Possible values include: "Copy", "Download".
+    :type mode: str
+    :param mount_path: The mount path of the model.
+    :type mount_path: str
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_misc.py
+            :start-after: [START model_configuration_entity_create]
+            :end-before: [END model_configuration_entity_create]
+            :language: python
+            :dedent: 8
+            :caption: Creating a Model Configuration object.
+    """
+
+    def __init__(self, *, mode: Optional[str] = None, mount_path: Optional[str] = None):
+        self.mode = mode
+        self.mount_path = mount_path
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestModelConfiguration) -> "ModelConfiguration":
+        return ModelConfiguration(mode=rest_obj.mode, mount_path=rest_obj.mount_path)
+
+    def _to_rest_object(self) -> RestModelConfiguration:
+        self._validate()
+        return RestModelConfiguration(mode=self.mode, mount_path=self.mount_path)
+
+    def _validate(self) -> None:
+        if self.mode is not None and self.mode.lower() not in ["copy", "download"]:
+            msg = "Mode must be either 'Copy' or 'Download'"
+            err = ValidationException(
+                message=msg,
+                target=ErrorTarget.MODEL,
+                no_personal_data_message=msg,
+                error_category=ErrorCategory.USER_ERROR,
+                error_type=ValidationErrorType.INVALID_VALUE,
+            )
+            log_and_raise_error(err)
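A short sketch of the ModelConfiguration class defined above; the mount path is an illustrative value:

    from azure.ai.ml.entities._assets._artifacts._package.model_configuration import ModelConfiguration

    # Only "copy" and "download" (case-insensitive) pass _validate(); other modes are rejected.
    model_config = ModelConfiguration(mode="download", mount_path="/var/azureml-app/model")
    rest_model_config = model_config._to_rest_object()  # runs _validate() before converting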
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/model_package.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/model_package.py
new file mode 100644
index 00000000..c4797c20
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/_package/model_package.py
@@ -0,0 +1,338 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access, redefined-builtin
+
+import re
+from os import PathLike
+from pathlib import Path
+from typing import IO, Any, AnyStr, Dict, List, Optional, Union
+
+from azure.ai.ml._restclient.v2023_08_01_preview.models import CodeConfiguration
+from azure.ai.ml._restclient.v2023_08_01_preview.models import ModelPackageInput as RestModelPackageInput
+from azure.ai.ml._restclient.v2023_08_01_preview.models import PackageInputPathId as RestPackageInputPathId
+from azure.ai.ml._restclient.v2023_08_01_preview.models import PackageInputPathUrl as RestPackageInputPathUrl
+from azure.ai.ml._restclient.v2023_08_01_preview.models import PackageInputPathVersion as RestPackageInputPathVersion
+from azure.ai.ml._restclient.v2023_08_01_preview.models import PackageRequest, PackageResponse
+from azure.ai.ml._schema.assets.package.model_package import ModelPackageSchema
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml._utils.utils import dump_yaml_to_file, snake_to_pascal
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY
+from azure.ai.ml.entities._resource import Resource
+from azure.ai.ml.entities._util import load_from_dict
+
+from .base_environment_source import BaseEnvironment
+from .inferencing_server import AzureMLBatchInferencingServer, AzureMLOnlineInferencingServer
+from .model_configuration import ModelConfiguration
+
+
+@experimental
+class PackageInputPathId:
+    """Package input path specified with a resource ID.
+
+    :param input_path_type: The type of the input path. Accepted values are "Url", "PathId", and "PathVersion".
+    :type input_path_type: Optional[str]
+    :param resource_id: The resource ID of the input path, e.g. "azureml://subscriptions/<>/resourceGroups/
+        <>/providers/Microsoft.MachineLearningServices/workspaces/<>/data/<>/versions/<>".
+    :type resource_id: Optional[str]
+    """
+
+    def __init__(
+        self,
+        *,
+        input_path_type: Optional[str] = None,
+        resource_id: Optional[str] = None,
+    ) -> None:
+        self.input_path_type = input_path_type
+        self.resource_id = resource_id
+
+    def _to_rest_object(self) -> RestPackageInputPathId:
+        return RestPackageInputPathId(
+            input_path_type=self.input_path_type,
+            resource_id=self.resource_id,
+        )
+
+    @classmethod
+    def _from_rest_object(cls, package_input_path_id_rest_object: RestPackageInputPathId) -> "PackageInputPathId":
+        return PackageInputPathId(
+            input_path_type=package_input_path_id_rest_object.input_path_type,
+            resource_id=package_input_path_id_rest_object.resource_id,
+        )
+
+
+@experimental
+class PackageInputPathVersion:
+    """Package input path specified with a resource name and version.
+
+    :param input_path_type: The type of the input path. Accepted values are "Url", "PathId", and "PathVersion".
+    :type input_path_type: Optional[str]
+    :param resource_name: The resource name of the input path.
+    :type resource_name: Optional[str]
+    :param resource_version: The resource version of the input path.
+    :type resource_version: Optional[str]
+    """
+
+    def __init__(
+        self,
+        *,
+        input_path_type: Optional[str] = None,
+        resource_name: Optional[str] = None,
+        resource_version: Optional[str] = None,
+    ) -> None:
+        self.input_path_type = input_path_type
+        self.resource_name = resource_name
+        self.resource_version = resource_version
+
+    def _to_rest_object(self) -> RestPackageInputPathVersion:
+        return RestPackageInputPathVersion(
+            input_path_type=self.input_path_type,
+            resource_name=self.resource_name,
+            resource_version=self.resource_version,
+        )
+
+    @classmethod
+    def _from_rest_object(
+        cls, package_input_path_version_rest_object: RestPackageInputPathVersion
+    ) -> "PackageInputPathVersion":
+        return PackageInputPathVersion(
+            input_path_type=package_input_path_version_rest_object.input_path_type,
+            resource_name=package_input_path_version_rest_object.resource_name,
+            resource_version=package_input_path_version_rest_object.resource_version,
+        )
+
+
+@experimental
+class PackageInputPathUrl:
+    """Package input path specified with a url.
+
+    :param input_path_type: The type of the input path. Accepted values are "Url", "PathId", and "PathVersion".
+    :type input_path_type: Optional[str]
+    :param url: The URL of the input path, e.g. "azureml://subscriptions/<>/resourceGroups/
+        <>/providers/Microsoft.MachineLearningServices/workspaces/data/<>/versions/<>".
+    :type url: Optional[str]
+    """
+
+    def __init__(self, *, input_path_type: Optional[str] = None, url: Optional[str] = None) -> None:
+        self.input_path_type = input_path_type
+        self.url = url
+
+    def _to_rest_object(self) -> RestPackageInputPathUrl:
+        return RestPackageInputPathUrl(
+            input_path_type=self.input_path_type,
+            url=self.url,
+        )
+
+    @classmethod
+    def _from_rest_object(cls, package_input_path_url_rest_object: RestPackageInputPathUrl) -> "PackageInputPathUrl":
+        return PackageInputPathUrl(
+            input_path_type=package_input_path_url_rest_object.input_path_type,
+            url=package_input_path_url_rest_object.url,
+        )
+
+
+@experimental
+class ModelPackageInput:
+    """Model package input.
+
+    :param type: The type of the input.
+    :type type: Optional[str]
+    :param path: The path of the input.
+    :type path: Optional[Union[~azure.ai.ml.entities.PackageInputPathId, ~azure.ai.ml.entities.PackageInputPathUrl,
+        ~azure.ai.ml.entities.PackageInputPathVersion]]
+    :param mode: The input mode.
+    :type mode: Optional[str]
+    :param mount_path: The mount path for the input.
+    :type mount_path: Optional[str]
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_misc.py
+            :start-after: [START model_package_input_entity_create]
+            :end-before: [END model_package_input_entity_create]
+            :language: python
+            :dedent: 8
+            :caption: Create a Model Package Input object.
+    """
+
+    def __init__(
+        self,
+        *,
+        type: Optional[str] = None,
+        path: Optional[Union[PackageInputPathId, PackageInputPathUrl, PackageInputPathVersion]] = None,
+        mode: Optional[str] = None,
+        mount_path: Optional[str] = None,
+    ) -> None:
+        self.type = type
+        self.path = path
+        self.mode = mode
+        self.mount_path = mount_path
+
+    def _to_rest_object(self) -> RestModelPackageInput:
+        if self.path is None:
+            return RestModelPackageInput(
+                input_type=snake_to_pascal(self.type),
+                path=None,
+                mode=snake_to_pascal(self.mode),
+                mount_path=self.mount_path,
+            )
+        return RestModelPackageInput(
+            input_type=snake_to_pascal(self.type),
+            path=self.path._to_rest_object(),
+            mode=snake_to_pascal(self.mode),
+            mount_path=self.mount_path,
+        )
+
+    @classmethod
+    def _from_rest_object(cls, model_package_input_rest_object: RestModelPackageInput) -> "ModelPackageInput":
+        return ModelPackageInput(
+            type=model_package_input_rest_object.input_type,
+            path=model_package_input_rest_object.path._from_rest_object(),
+            mode=model_package_input_rest_object.mode,
+            mount_path=model_package_input_rest_object.mount_path,
+        )
+
+
+@experimental
+class ModelPackage(Resource, PackageRequest):
+    """Model package.
+
+    :param target_environment: The target environment for the model package: a name, an "azureml:<name>:<version>" ID, or a dict with a "name" key.
+    :type target_environment: Union[str, Dict[str, str]]
+    :param inferencing_server: The inferencing server of the model package.
+    :type inferencing_server: Union[~azure.ai.ml.entities.AzureMLOnlineInferencingServer,
+        ~azure.ai.ml.entities.AzureMLBatchInferencingServer]
+    :param base_environment_source: The base environment source of the model package.
+    :type base_environment_source: Optional[~azure.ai.ml.entities.BaseEnvironment]
+    :param target_environment_version: The version of the model package.
+    :type target_environment_version: Optional[str]
+    :param environment_variables: The environment variables of the model package.
+    :type environment_variables: Optional[dict[str, str]]
+    :param inputs: The inputs of the model package.
+    :type inputs: Optional[list[~azure.ai.ml.entities.ModelPackageInput]]
+    :param model_configuration: The model configuration.
+    :type model_configuration: Optional[~azure.ai.ml.entities.ModelConfiguration]
+    :param tags: The tags of the model package.
+    :type tags: Optional[dict[str, str]]
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_misc.py
+            :start-after: [START model_package_entity_create]
+            :end-before: [END model_package_entity_create]
+            :language: python
+            :dedent: 8
+            :caption: Create a Model Package object.
+    """
+
+    def __init__(
+        self,
+        *,
+        target_environment: Union[str, Dict[str, str]],
+        inferencing_server: Union[AzureMLOnlineInferencingServer, AzureMLBatchInferencingServer],
+        base_environment_source: Optional[BaseEnvironment] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
+        inputs: Optional[List[ModelPackageInput]] = None,
+        model_configuration: Optional[ModelConfiguration] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs: Any,
+    ):
+        if isinstance(target_environment, dict):
+            target_environment = target_environment["name"]
+            env_version = None
+        else:
+            parse_id = re.match(r"azureml:(\w+):(\d+)$", target_environment)
+
+            if parse_id:
+                target_environment = parse_id.group(1)
+                env_version = parse_id.group(2)
+            else:
+                env_version = None
+
+        super().__init__(
+            name=target_environment,
+            target_environment_id=target_environment,
+            base_environment_source=base_environment_source,
+            inferencing_server=inferencing_server,
+            model_configuration=model_configuration,
+            inputs=inputs,
+            tags=tags,
+            environment_variables=environment_variables,
+        )
+        self.environment_version = env_version
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "ModelPackage":
+        params_override = params_override or []
+        data = data or {}
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        res: ModelPackage = load_from_dict(ModelPackageSchema, data, context, **kwargs)
+        return res
+
+    def dump(
+        self,
+        dest: Union[str, PathLike, IO[AnyStr]],
+        **kwargs: Any,
+    ) -> None:
+        """Dumps the job content into a file in YAML format.
+
+        :param dest: The local path or file stream to write the YAML content to.
+            If dest is a file path, a new file will be created.
+            If dest is an open file, the file will be written to directly.
+        :type dest: Union[PathLike, str, IO[AnyStr]]
+        :raises FileExistsError: Raised if dest is a file path and the file already exists.
+        :raises IOError: Raised if dest is an open file and the file is not writable.
+        """
+        yaml_serialized = self._to_dict()
+        dump_yaml_to_file(dest, yaml_serialized, default_flow_style=False)
+
+    def _to_dict(self) -> Dict:
+        return dict(ModelPackageSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self))
+
+    @classmethod
+    def _from_rest_object(cls, model_package_rest_object: PackageResponse) -> Any:
+        target_environment_id = model_package_rest_object.target_environment_id
+        return target_environment_id
+
+    def _to_rest_object(self) -> PackageRequest:
+        code = None
+
+        if (
+            self.inferencing_server
+            and hasattr(self.inferencing_server, "code_configuration")
+            and self.inferencing_server.code_configuration
+        ):
+            self.inferencing_server.code_configuration._validate()
+            code_id = (
+                self.inferencing_server.code_configuration.code
+                if isinstance(self.inferencing_server.code_configuration.code, str)
+                else self.inferencing_server.code_configuration.code.id
+            )
+            code = CodeConfiguration(
+                code_id=code_id,
+                scoring_script=self.inferencing_server.code_configuration.scoring_script,
+            )
+            self.inferencing_server.code_configuration = code
+
+        package_request = PackageRequest(
+            target_environment_id=self.target_environment_id,
+            base_environment_source=(
+                self.base_environment_source._to_rest_object() if self.base_environment_source else None
+            ),
+            inferencing_server=self.inferencing_server._to_rest_object() if self.inferencing_server else None,
+            model_configuration=self.model_configuration._to_rest_object() if self.model_configuration else None,
+            inputs=[input._to_rest_object() for input in self.inputs] if self.inputs else None,
+            tags=self.tags,
+            environment_variables=self.environment_variables,
+        )
+
+        return package_request
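A sketch assembling a ModelPackage from the pieces defined above, assuming azure-ai-ml is installed and that CodeConfiguration accepts code and scoring_script keyword arguments, as its use in _to_rest_object above suggests; names and paths are illustrative:

    from azure.ai.ml.entities._assets._artifacts._package.base_environment_source import BaseEnvironment
    from azure.ai.ml.entities._assets._artifacts._package.inferencing_server import AzureMLOnlineInferencingServer
    from azure.ai.ml.entities._assets._artifacts._package.model_package import ModelPackage
    from azure.ai.ml.entities._deployment.code_configuration import CodeConfiguration

    # "azureml:<name>:<version>" is split by the constructor into the environment name and version.
    package = ModelPackage(
        target_environment="azureml:packaged_env:1",
        base_environment_source=BaseEnvironment(type="EnvironmentAsset", resource_id="azureml:my-base-env:1"),
        inferencing_server=AzureMLOnlineInferencingServer(
            code_configuration=CodeConfiguration(code="./src", scoring_script="score.py")
        ),
    )
    print(package.name, package.environment_version)  # packaged_env 1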
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/artifact.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/artifact.py
new file mode 100644
index 00000000..f82e2aa0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/artifact.py
@@ -0,0 +1,131 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from abc import abstractmethod
+from os import PathLike
+from pathlib import Path, PurePosixPath
+from typing import Any, Dict, Optional, Union
+from urllib.parse import urljoin
+
+from azure.ai.ml._utils.utils import is_mlflow_uri, is_url
+from azure.ai.ml.entities._assets.asset import Asset
+
+
+class ArtifactStorageInfo:
+    def __init__(
+        self,
+        name: str,
+        version: str,
+        relative_path: str,
+        datastore_arm_id: Optional[str],
+        container_name: str,
+        storage_account_url: Optional[str] = None,
+        is_file: Optional[bool] = None,
+        indicator_file: Optional[str] = None,
+    ):
+        self.name = name
+        self.version = version
+        self.relative_path = relative_path
+        self.datastore_arm_id = datastore_arm_id
+        self.container_name = container_name
+        self.storage_account_url = storage_account_url
+        self.is_file = is_file
+        self.indicator_file = indicator_file
+
+    @property
+    def full_storage_path(self) -> Optional[str]:
+        if self.storage_account_url is None:
+            return f"{self.container_name}/{self.relative_path}"
+        return urljoin(self.storage_account_url, f"{self.container_name}/{self.relative_path}")
+
+    @property
+    def subdir_path(self) -> Optional[str]:
+        if self.is_file:
+            path = PurePosixPath(self.relative_path).parent
+            if self.storage_account_url is None:
+                return f"{self.container_name}/{path}"
+            return urljoin(self.storage_account_url, f"{self.container_name}/{path}")
+        return self.full_storage_path
+
+
+class Artifact(Asset):
+    """Base class for artifact, can't be instantiated directly.
+
+    :param name: Name of the resource.
+    :type name: str
+    :param version: Version of the resource.
+    :type version: str
+    :param path: The local or remote path to the asset.
+    :type path: Union[str, os.PathLike]
+    :param description: Description of the resource.
+    :type description: str
+    :param tags: Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param datastore: The datastore to upload the local artifact to.
+    :type datastore: str
+    :param kwargs: A dictionary of additional configuration parameters.
+    :type kwargs: dict
+    """
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict] = None,
+        properties: Optional[Dict] = None,
+        path: Optional[Union[str, PathLike]] = None,
+        datastore: Optional[str] = None,
+        **kwargs: Any,
+    ):
+        super().__init__(
+            name=name,
+            version=version,
+            description=description,
+            tags=tags,
+            properties=properties,
+            **kwargs,
+        )
+        self.path = path
+        self.datastore = datastore
+
+    @property
+    def path(self) -> Optional[Union[str, PathLike]]:
+        return self._path
+
+    @path.setter
+    def path(self, value: Optional[Union[str, PathLike]]) -> None:
+        if not value or is_url(value) or Path(value).is_absolute() or is_mlflow_uri(value):
+            self._path = value
+        else:
+            self._path = Path(self.base_path, value).resolve()
+
+    @abstractmethod
+    def _to_dict(self) -> Dict:
+        pass
+
+    def __eq__(self, other: Any) -> bool:
+        return (
+            type(self) == type(other)  # pylint: disable = unidiomatic-typecheck
+            and self.name == other.name
+            and self.id == other.id
+            and self.version == other.version
+            and self.description == other.description
+            and self.tags == other.tags
+            and self.properties == other.properties
+            and self.base_path == other.base_path
+            and self._is_anonymous == other._is_anonymous
+        )
+
+    def __ne__(self, other: Any) -> bool:
+        return not self.__eq__(other)
+
+    @abstractmethod
+    def _update_path(self, asset_artifact: ArtifactStorageInfo) -> None:
+        """Updates an an artifact with the remote path of a local upload.
+
+        :param asset_artifact: The asset storage info of the artifact
+        :type asset_artifact: ArtifactStorageInfo
+        """
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/code.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/code.py
new file mode 100644
index 00000000..b08149ab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/code.py
@@ -0,0 +1,142 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+import os
+from os import PathLike
+from pathlib import Path
+from typing import Any, Dict, Optional, Union
+
+from azure.ai.ml._restclient.v2022_05_01.models import CodeVersionData, CodeVersionDetails
+from azure.ai.ml._schema import CodeAssetSchema
+from azure.ai.ml._utils._arm_id_utils import AMLVersionedArmId
+from azure.ai.ml._utils._asset_utils import IgnoreFile, get_content_hash, get_content_hash_version, get_ignore_file
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY, ArmConstants
+from azure.ai.ml.entities._assets import Artifact
+from azure.ai.ml.entities._system_data import SystemData
+from azure.ai.ml.entities._util import load_from_dict
+
+from .artifact import ArtifactStorageInfo
+
+
+class Code(Artifact):
+    """Code for training and scoring.
+
+    :param name: Name of the resource.
+    :type name: str
+    :param version: Version of the resource.
+    :type version: str
+    :param path: A local path or a remote URI. A datastore remote URI looks like
+        "azureml://subscriptions/{}/resourcegroups/{}/workspaces/{}/datastores/{}/paths/path_on_datastore/"
+    :type path: str
+    :param description: Description of the resource.
+    :type description: str
+    :param tags: Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param ignore_file: Ignore file for the resource.
+    :type ignore_file: IgnoreFile
+    :param kwargs: A dictionary of additional configuration parameters.
+    :type kwargs: dict
+    """
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict] = None,
+        properties: Optional[Dict] = None,
+        path: Optional[Union[str, PathLike]] = None,
+        ignore_file: Optional[IgnoreFile] = None,
+        **kwargs: Any,
+    ):
+        super().__init__(
+            name=name,
+            version=version,
+            description=description,
+            tags=tags,
+            properties=properties,
+            path=path,
+            **kwargs,
+        )
+        self._arm_type = ArmConstants.CODE_VERSION_TYPE
+        if self.path and os.path.isabs(self.path):
+            # Only calculate hash for local files
+            self._ignore_file = get_ignore_file(self.path) if ignore_file is None else ignore_file
+            self._hash_sha256 = get_content_hash(self.path, self._ignore_file)
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "Code":
+        data = data or {}
+        params_override = params_override or []
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        res: Code = load_from_dict(CodeAssetSchema, data, context, **kwargs)
+        return res
+
+    def _to_dict(self) -> Dict:
+        res: dict = CodeAssetSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+        return res
+
+    @classmethod
+    def _from_rest_object(cls, code_rest_object: CodeVersionData) -> "Code":
+        rest_code_version: CodeVersionDetails = code_rest_object.properties
+        arm_id = AMLVersionedArmId(arm_id=code_rest_object.id)
+        code = Code(
+            id=code_rest_object.id,
+            name=arm_id.asset_name,
+            version=arm_id.asset_version,
+            path=rest_code_version.code_uri,
+            description=rest_code_version.description,
+            tags=rest_code_version.tags,
+            properties=rest_code_version.properties,
+            # pylint: disable=protected-access
+            creation_context=SystemData._from_rest_object(code_rest_object.system_data),
+            is_anonymous=rest_code_version.is_anonymous,
+        )
+        return code
+
+    def _to_rest_object(self) -> CodeVersionData:
+        properties = {}
+        if hasattr(self, "_hash_sha256"):
+            properties["hash_sha256"] = self._hash_sha256
+            properties["hash_version"] = get_content_hash_version()
+        code_version = CodeVersionDetails(code_uri=self.path, is_anonymous=self._is_anonymous, properties=properties)
+        code_version_resource = CodeVersionData(properties=code_version)
+
+        return code_version_resource
+
+    def _update_path(self, asset_artifact: ArtifactStorageInfo) -> None:
+        """Update an artifact with the remote path of a local upload.
+
+        :param asset_artifact: The asset storage info of the artifact
+        :type asset_artifact: ArtifactStorageInfo
+        """
+        if asset_artifact.is_file:
+            # Code paths cannot point to single files; they must point to a containing folder.
+            # Skipping the setter to avoid being resolved as a local path
+            self._path = asset_artifact.subdir_path  # pylint: disable=attribute-defined-outside-init
+        else:
+            self._path = asset_artifact.full_storage_path  # pylint: disable=attribute-defined-outside-init
+
+    # pylint: disable=unused-argument
+    def _to_arm_resource_param(self, **kwargs: Any) -> Dict:
+        properties = self._to_rest_object().properties
+
+        return {
+            self._arm_type: {
+                ArmConstants.NAME: self.name,
+                ArmConstants.VERSION: self.version,
+                ArmConstants.PROPERTIES_PARAMETER_NAME: self._serialize.body(properties, "CodeVersionDetails"),
+            }
+        }
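A short sketch of constructing the Code asset defined above, assuming azure-ai-ml is installed and that ./src exists locally; relative paths are resolved against base_path and local content is hashed at construction time:

    from azure.ai.ml.entities._assets._artifacts.code import Code

    # Local folder: the content hash (_hash_sha256) is computed for absolute local paths.
    code_asset = Code(name="training_src", version="1", path="./src", description="Training scripts")
    print(code_asset.path)  # resolved absolute path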
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/data.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/data.py
new file mode 100644
index 00000000..710e959a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/data.py
@@ -0,0 +1,237 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+import os
+import re
+from os import PathLike
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple, Type, Union
+
+from azure.ai.ml._exception_helper import log_and_raise_error
+from azure.ai.ml._restclient.v2023_04_01_preview.models import (
+    DataContainer,
+    DataContainerProperties,
+    DataType,
+    DataVersionBase,
+    DataVersionBaseProperties,
+    MLTableData,
+    UriFileDataVersion,
+    UriFolderDataVersion,
+)
+from azure.ai.ml._schema import DataSchema
+from azure.ai.ml._utils._arm_id_utils import get_arm_id_object_from_id
+from azure.ai.ml._utils.utils import is_url
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY, SHORT_URI_FORMAT, AssetTypes
+from azure.ai.ml.entities._assets import Artifact
+from azure.ai.ml.entities._system_data import SystemData
+from azure.ai.ml.entities._util import load_from_dict
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationErrorType, ValidationException
+
+from .artifact import ArtifactStorageInfo
+
+DataAssetTypeModelMap: Dict[str, Type[DataVersionBaseProperties]] = {
+    AssetTypes.URI_FILE: UriFileDataVersion,
+    AssetTypes.URI_FOLDER: UriFolderDataVersion,
+    AssetTypes.MLTABLE: MLTableData,
+}
+
+
+def getModelForDataAssetType(data_asset_type: str) -> Optional[Type[DataVersionBaseProperties]]:
+    model = DataAssetTypeModelMap.get(data_asset_type)
+    if model is None:
+        msg = "Unknown DataType {}".format(data_asset_type)
+        err = ValidationException(
+            message=msg,
+            no_personal_data_message=msg,
+            error_type=ValidationErrorType.INVALID_VALUE,
+            target=ErrorTarget.DATA,
+            error_category=ErrorCategory.USER_ERROR,
+        )
+        log_and_raise_error(err)
+    return model
+
+
+DataTypeMap: Dict[DataType, str] = {
+    DataType.URI_FILE: AssetTypes.URI_FILE,
+    DataType.URI_FOLDER: AssetTypes.URI_FOLDER,
+    DataType.MLTABLE: AssetTypes.MLTABLE,
+}
+
+
+def getDataAssetType(data_type: DataType) -> str:
+    return DataTypeMap.get(data_type, data_type)  # pass through value if no match found
+
+
+class Data(Artifact):
+    """Data for training and scoring.
+
+    :param name: Name of the resource.
+    :type name: str
+    :param version: Version of the resource.
+    :type version: str
+    :param description: Description of the resource.
+    :type description: str
+    :param tags: Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param path: The path to the asset on the datastore. This can be local or remote.
+    :type path: str
+    :param type: The type of the asset. Valid values are uri_file, uri_folder, mltable. Defaults to uri_folder.
+    :type type: Literal[AssetTypes.URI_FILE, AssetTypes.URI_FOLDER, AssetTypes.MLTABLE]
+    :param kwargs: A dictionary of additional configuration parameters.
+    :type kwargs: dict
+    """
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict] = None,
+        properties: Optional[Dict] = None,
+        path: Optional[str] = None,  # if type is mltable, the path has to be a folder.
+        type: str = AssetTypes.URI_FOLDER,  # pylint: disable=redefined-builtin
+        **kwargs: Any,
+    ):
+        self._path: Optional[Union[Path, str, PathLike]] = None
+
+        self._skip_validation = kwargs.pop("skip_validation", False)
+        self._mltable_schema_url = kwargs.pop("mltable_schema_url", None)
+        self._referenced_uris = kwargs.pop("referenced_uris", None)
+        self.type = type
+        super().__init__(
+            name=name,
+            version=version,
+            path=path,
+            description=description,
+            tags=tags,
+            properties=properties,
+            **kwargs,
+        )
+        self.path = path
+
+    @property
+    def path(self) -> Optional[Union[Path, str, PathLike]]:
+        return self._path
+
+    @path.setter
+    def path(self, value: str) -> None:
+        # Call the parent setter to resolve the path with base_path if it was a local path
+        # TODO: Bug Item number: 2883424
+        super(Data, type(self)).path.fset(self, value)  # type: ignore
+        if self.type == AssetTypes.URI_FOLDER and self._path is not None and not is_url(self._path):
+            self._path = Path(os.path.join(self._path, ""))
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "Data":
+        data = data or {}
+        params_override = params_override or []
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        data_asset = Data._load_from_dict(yaml_data=data, context=context, **kwargs)
+
+        return data_asset
+
+    @classmethod
+    def _load_from_dict(cls, yaml_data: Dict, context: Dict, **kwargs: Any) -> "Data":
+        return Data(**load_from_dict(DataSchema, yaml_data, context, **kwargs))
+
+    def _to_dict(self) -> Dict:
+        res: dict = DataSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+        return res
+
+    def _to_container_rest_object(self) -> DataContainer:
+        VersionDetailsClass = getModelForDataAssetType(self.type)
+        return DataContainer(
+            properties=DataContainerProperties(
+                properties=self.properties,
+                tags=self.tags,
+                is_archived=False,
+                data_type=VersionDetailsClass.data_type if VersionDetailsClass is not None else None,
+            )
+        )
+
+    def _to_rest_object(self) -> Optional[DataVersionBase]:
+        VersionDetailsClass = getModelForDataAssetType(self.type)
+        if VersionDetailsClass is not None:
+            data_version_details = VersionDetailsClass(
+                description=self.description,
+                is_anonymous=self._is_anonymous,
+                tags=self.tags,
+                is_archived=False,
+                properties=self.properties,
+                data_uri=self.path,
+                auto_delete_setting=self.auto_delete_setting,
+            )
+            if VersionDetailsClass._attribute_map.get("referenced_uris") is not None:
+                data_version_details.referenced_uris = self._referenced_uris
+            return DataVersionBase(properties=data_version_details)
+
+        return None
+
+    @classmethod
+    def _from_container_rest_object(cls, data_container_rest_object: DataContainer) -> "Data":
+        data_rest_object_details: DataContainerProperties = data_container_rest_object.properties
+        data = Data(
+            name=data_container_rest_object.name,
+            creation_context=SystemData._from_rest_object(data_container_rest_object.system_data),
+            tags=data_rest_object_details.tags,
+            properties=data_rest_object_details.properties,
+            type=getDataAssetType(data_rest_object_details.data_type),
+        )
+        data.latest_version = data_rest_object_details.latest_version
+        return data
+
+    @classmethod
+    def _from_rest_object(cls, data_rest_object: DataVersionBase) -> "Data":
+        data_rest_object_details: DataVersionBaseProperties = data_rest_object.properties
+        arm_id_object = get_arm_id_object_from_id(data_rest_object.id)
+        path = data_rest_object_details.data_uri
+        data = Data(
+            id=data_rest_object.id,
+            name=arm_id_object.asset_name,
+            version=arm_id_object.asset_version,
+            path=path,
+            type=getDataAssetType(data_rest_object_details.data_type),
+            description=data_rest_object_details.description,
+            tags=data_rest_object_details.tags,
+            properties=data_rest_object_details.properties,
+            creation_context=SystemData._from_rest_object(data_rest_object.system_data),
+            is_anonymous=data_rest_object_details.is_anonymous,
+            referenced_uris=getattr(data_rest_object_details, "referenced_uris", None),
+            auto_delete_setting=getattr(data_rest_object_details, "auto_delete_setting", None),
+        )
+        return data
+
+    @classmethod
+    def _resolve_cls_and_type(cls, data: Dict, params_override: Optional[List[Dict]] = None) -> Tuple:
+        from azure.ai.ml.entities._data_import.data_import import DataImport
+
+        if "source" in data:
+            return DataImport, None
+
+        return cls, None
+
+    def _update_path(self, asset_artifact: ArtifactStorageInfo) -> None:
+        regex = r"datastores\/(.+)"
+        # datastore_arm_id is null for registry scenario, so capture the full_storage_path
+        if not asset_artifact.datastore_arm_id and asset_artifact.full_storage_path:
+            self.path = asset_artifact.full_storage_path
+        else:
+            groups = re.search(regex, asset_artifact.datastore_arm_id)  # type: ignore
+            if groups:
+                datastore_name = groups.group(1)
+                self.path = SHORT_URI_FORMAT.format(datastore_name, asset_artifact.relative_path)
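A sketch of the Data entity above for a uri_file asset, assuming azure-ai-ml is installed; the URL is illustrative:

    from azure.ai.ml.constants._common import AssetTypes
    from azure.ai.ml.entities._assets._artifacts.data import Data

    # uri_file points at a single file; uri_folder paths get a trailing separator appended by the setter.
    data_asset = Data(
        name="sample_csv",
        version="1",
        type=AssetTypes.URI_FILE,
        path="https://example.com/sample.csv",
        description="Illustrative tabular data asset",
    )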
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/feature_set.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/feature_set.py
new file mode 100644
index 00000000..a5bb73fe
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/feature_set.py
@@ -0,0 +1,220 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+from os import PathLike
+from pathlib import Path
+from typing import IO, Any, AnyStr, Dict, List, Optional, Union, cast
+
+from azure.ai.ml._restclient.v2023_10_01.models import (
+    FeaturesetContainer,
+    FeaturesetContainerProperties,
+    FeaturesetVersion,
+    FeaturesetVersionProperties,
+)
+from azure.ai.ml._schema._feature_set.feature_set_schema import FeatureSetSchema
+from azure.ai.ml._utils._arm_id_utils import AMLNamedArmId, get_arm_id_object_from_id
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, LONG_URI_FORMAT, PARAMS_OVERRIDE_KEY
+from azure.ai.ml.entities._assets import Artifact
+from azure.ai.ml.entities._feature_set.feature_set_specification import FeatureSetSpecification
+from azure.ai.ml.entities._feature_set.materialization_settings import MaterializationSettings
+from azure.ai.ml.entities._util import load_from_dict
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationErrorType, ValidationException
+
+from .artifact import ArtifactStorageInfo
+
+
+class FeatureSet(Artifact):
+    """Feature Set
+
+    :param name: The name of the Feature Set resource.
+    :type name: str
+    :param version: The version of the Feature Set resource.
+    :type version: str
+    :param entities: Specifies list of entities.
+    :type entities: list[str]
+    :param specification: Specifies the feature set spec details.
+    :type specification: ~azure.ai.ml.entities.FeatureSetSpecification
+    :param stage: Feature set stage. Allowed values: Development, Production, Archived. Defaults to Development.
+    :type stage: Optional[str]
+    :param description: The description of the Feature Set resource. Defaults to None.
+    :type description: Optional[str]
+    :param tags: Tag dictionary. Tags can be added, removed, and updated. Defaults to None.
+    :type tags: Optional[dict[str, str]]
+    :param materialization_settings: Specifies the materialization settings. Defaults to None.
+    :type materialization_settings: Optional[~azure.ai.ml.entities.MaterializationSettings]
+    :param kwargs: A dictionary of additional configuration parameters.
+    :type kwargs: dict
+    :raises ValidationException: Raised if stage is specified and is not valid.
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_featurestore.py
+            :start-after: [START configure_feature_set]
+            :end-before: [END configure_feature_set]
+            :language: python
+            :dedent: 8
+            :caption: Instantiating a Feature Set object
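+
+    A minimal construction sketch (values are illustrative placeholders; assumes FeatureSet and
+    FeatureSetSpecification are importable from azure.ai.ml.entities, and that
+    FeatureSetSpecification accepts a path keyword matching the specification.path attribute used
+    by this class):
+
+    .. code-block:: python
+
+        from azure.ai.ml.entities import FeatureSet, FeatureSetSpecification
+
+        transactions = FeatureSet(
+            name="transactions",
+            version="1",
+            entities=["azureml:account:1"],
+            specification=FeatureSetSpecification(path="./transactions_spec"),
+            stage="Development",
+            tags={"domain": "fraud"},
+        )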
+    """
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        version: str,
+        entities: List[str],
+        specification: Optional[FeatureSetSpecification],
+        stage: Optional[str] = "Development",
+        description: Optional[str] = None,
+        materialization_settings: Optional[MaterializationSettings] = None,
+        tags: Optional[Dict] = None,
+        **kwargs: Any,
+    ) -> None:
+        super().__init__(
+            name=name,
+            version=version,
+            description=description,
+            tags=tags,
+            path=specification.path if specification is not None else None,
+            **kwargs,
+        )
+        if stage and stage not in ["Development", "Production", "Archived"]:
+            msg = f"Stage must be Development, Production, or Archived, found {stage}"
+            raise ValidationException(
+                message=msg,
+                no_personal_data_message=msg,
+                error_type=ValidationErrorType.INVALID_VALUE,
+                target=ErrorTarget.FEATURE_SET,
+                error_category=ErrorCategory.USER_ERROR,
+            )
+        self.entities = entities
+        self.specification = specification
+        self.stage = stage
+        self.materialization_settings = materialization_settings
+        self.latest_version = None
+
+    def _to_rest_object(self) -> FeaturesetVersion:
+        featureset_version_properties = FeaturesetVersionProperties(
+            description=self.description,
+            properties=self.properties,
+            tags=self.tags,
+            entities=self.entities,
+            materialization_settings=(
+                self.materialization_settings._to_rest_object() if self.materialization_settings else None
+            ),
+            specification=self.specification._to_rest_object() if self.specification is not None else None,
+            stage=self.stage,
+        )
+        return FeaturesetVersion(name=self.name, properties=featureset_version_properties)
+
+    @classmethod
+    def _from_rest_object(cls, featureset_rest_object: FeaturesetVersion) -> Optional["FeatureSet"]:
+        if not featureset_rest_object:
+            return None
+        featureset_rest_object_details: FeaturesetVersionProperties = featureset_rest_object.properties
+        arm_id_object = get_arm_id_object_from_id(featureset_rest_object.id)
+        featureset = FeatureSet(
+            id=featureset_rest_object.id,
+            name=arm_id_object.asset_name,
+            version=arm_id_object.asset_version,
+            description=featureset_rest_object_details.description,
+            tags=featureset_rest_object_details.tags,
+            entities=featureset_rest_object_details.entities,
+            materialization_settings=MaterializationSettings._from_rest_object(
+                featureset_rest_object_details.materialization_settings
+            ),
+            specification=FeatureSetSpecification._from_rest_object(featureset_rest_object_details.specification),
+            stage=featureset_rest_object_details.stage,
+            properties=featureset_rest_object_details.properties,
+        )
+        return featureset
+
+    @classmethod
+    def _from_container_rest_object(cls, rest_obj: FeaturesetContainer) -> "FeatureSet":
+        rest_object_details: FeaturesetContainerProperties = rest_obj.properties
+        arm_id_object = get_arm_id_object_from_id(rest_obj.id)
+        featureset = FeatureSet(
+            name=arm_id_object.asset_name,
+            description=rest_object_details.description,
+            tags=rest_object_details.tags,
+            entities=[],
+            specification=FeatureSetSpecification(),
+            version="",
+        )
+        featureset.latest_version = rest_object_details.latest_version
+        return featureset
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "FeatureSet":
+        data = data or {}
+        params_override = params_override or []
+        base_path = Path(yaml_path).parent if yaml_path else Path("./")
+        context = {
+            BASE_PATH_CONTEXT_KEY: base_path,
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        loaded_schema = load_from_dict(FeatureSetSchema, data, context, **kwargs)
+        feature_set = FeatureSet(base_path=base_path, **loaded_schema)
+        return feature_set
+
+    def _to_dict(self) -> Dict:
+        return dict(FeatureSetSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self))
+
+    def _update_path(self, asset_artifact: ArtifactStorageInfo) -> None:
+        # if datastore_arm_id is null, capture the full_storage_path
+        if not asset_artifact.datastore_arm_id and asset_artifact.full_storage_path:
+            self.path = asset_artifact.full_storage_path
+        else:
+            aml_datastore_id = AMLNamedArmId(asset_artifact.datastore_arm_id)
+            self.path = LONG_URI_FORMAT.format(
+                aml_datastore_id.subscription_id,
+                aml_datastore_id.resource_group_name,
+                aml_datastore_id.workspace_name,
+                aml_datastore_id.asset_name,
+                asset_artifact.relative_path,
+            )
+
+            if self.specification is not None:
+                self.specification.path = self.path
+
+    def dump(self, dest: Union[str, PathLike, IO[AnyStr]], **kwargs: Any) -> None:
+        """Dump the asset content into a file in YAML format.
+
+        :param dest: The local path or file stream to write the YAML content to.
+            If dest is a file path, a new file will be created.
+            If dest is an open file, the file will be written to directly.
+        :type dest: Union[PathLike, str, IO[AnyStr]]
+        :raises FileExistsError: Raised if dest is a file path and the file already exists.
+        :raises IOError: Raised if dest is an open file and the file is not writable.
+        """
+
+        import os
+        import shutil
+
+        from azure.ai.ml._utils.utils import is_url
+
+        origin_spec_path = self.specification.path if self.specification is not None else None
+        if isinstance(dest, (PathLike, str)) and self.specification is not None and not is_url(self.specification.path):
+            if os.path.exists(dest):
+                raise FileExistsError(f"File {dest} already exists.")
+            relative_path = os.path.basename(cast(PathLike, self.specification.path))
+            src_spec_path = (
+                str(Path(self._base_path, self.specification.path)) if self.specification.path is not None else ""
+            )
+            dest_spec_path = str(Path(os.path.dirname(dest), relative_path))
+            if os.path.exists(dest_spec_path):
+                shutil.rmtree(dest_spec_path)
+            shutil.copytree(src=src_spec_path, dst=dest_spec_path)
+            self.specification.path = str(Path("./", relative_path))
+        super().dump(dest=dest, **kwargs)
+
+        if self.specification is not None:
+            self.specification.path = origin_spec_path
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/index.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/index.py
new file mode 100644
index 00000000..35f671d3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/index.py
@@ -0,0 +1,137 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from os import PathLike
+from pathlib import Path
+from typing import Any, Dict, Optional, Union, cast
+
+# cspell:disable-next-line
+from azure.ai.ml._restclient.azure_ai_assets_v2024_04_01.azureaiassetsv20240401.models import Index as RestIndex
+from azure.ai.ml._schema import IndexAssetSchema
+from azure.ai.ml._utils._arm_id_utils import AMLAssetId, AMLNamedArmId
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, LONG_URI_FORMAT, PARAMS_OVERRIDE_KEY
+from azure.ai.ml.entities._assets import Artifact
+from azure.ai.ml.entities._assets._artifacts.artifact import ArtifactStorageInfo
+from azure.ai.ml.entities._system_data import RestSystemData, SystemData
+from azure.ai.ml.entities._util import load_from_dict
+
+
+@experimental
+class Index(Artifact):
+    """Index asset.
+
+    :ivar name: Name of the resource.
+    :vartype name: str
+    :ivar version: Version of the resource.
+    :vartype version: str
+    :ivar id: Fully qualified resource Id:
+     azureml://workspace/{workspaceName}/indexes/{name}/versions/{version} of the index. Required.
+    :vartype id: str
+    :ivar stage: Update stage to 'Archive' for soft delete. Default is Development, which means the
+     asset is under development. Required.
+    :vartype stage: str
+    :ivar description: Description information of the asset.
+    :vartype description: Optional[str]
+    :ivar tags: Asset's tags.
+    :vartype tags: Optional[dict[str, str]]
+    :ivar properties: Asset's properties.
+    :vartype properties: Optional[dict[str, str]]
+    :ivar path: The local or remote path to the asset.
+    :vartype path: Optional[Union[str, os.PathLike]]
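+
+    A minimal construction sketch (values are illustrative placeholders; assumes Index is
+    importable from azure.ai.ml.entities):
+
+    .. code-block:: python
+
+        from azure.ai.ml.entities import Index
+
+        index = Index(
+            name="products-index",
+            version="1",
+            stage="Development",
+            path="./index_files",
+            tags={"source": "product-catalog"},
+        )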
+    """
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        version: Optional[str] = None,
+        stage: str = "Development",
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        path: Optional[Union[str, PathLike]] = None,
+        datastore: Optional[str] = None,
+        **kwargs: Any,
+    ):
+        self.stage = stage
+        super().__init__(
+            name=name,
+            version=version,
+            description=description,
+            tags=tags,
+            properties=properties,
+            path=path,
+            datastore=datastore,
+            **kwargs,
+        )
+
+    @classmethod
+    def _from_rest_object(cls, index_rest_object: RestIndex) -> "Index":
+        """Convert the response from the Index API into a Index
+
+        :param RestIndex index_rest_object:
+        :return: An Index Asset
+        :rtype: Index
+        """
+        asset_id = AMLAssetId(asset_id=index_rest_object.id)
+
+        return Index(
+            id=index_rest_object.id,
+            name=asset_id.asset_name,
+            version=asset_id.asset_version,
+            description=index_rest_object.description,
+            tags=index_rest_object.tags,
+            properties=index_rest_object.properties,
+            stage=index_rest_object.stage,
+            path=index_rest_object.storage_uri,
+            # pylint: disable-next=protected-access
+            creation_context=SystemData._from_rest_object(
+                RestSystemData.from_dict(index_rest_object.system_data.as_dict())
+            ),
+        )
+
+    def _to_rest_object(self) -> RestIndex:
+        # Note: Index.name and Index.version get dropped going to RestIndex, since both are encoded in the id
+        #       (when present)
+        return RestIndex(
+            stage=self.stage,
+            storage_uri=self.path,
+            description=self.description,
+            tags=self.tags,
+            properties=self.properties,
+            id=self.id,
+        )
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "Index":
+        data = data or {}
+        params_override = params_override or []
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        return cast(Index, load_from_dict(IndexAssetSchema, data, context, **kwargs))
+
+    def _to_dict(self) -> Dict:
+        return cast(dict, IndexAssetSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self))
+
+    def _update_path(self, asset_artifact: ArtifactStorageInfo) -> None:
+        """Updates an an artifact with the remote path of a local upload.
+
+        :param ArtifactStorageInfo asset_artifact: The asset storage info of the artifact
+        """
+        aml_datastore_id = AMLNamedArmId(asset_artifact.datastore_arm_id)
+        self.path = LONG_URI_FORMAT.format(
+            aml_datastore_id.subscription_id,
+            aml_datastore_id.resource_group_name,
+            aml_datastore_id.workspace_name,
+            aml_datastore_id.asset_name,
+            asset_artifact.relative_path,
+        )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/model.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/model.py
new file mode 100644
index 00000000..8e65bd3e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/_artifacts/model.py
@@ -0,0 +1,219 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from os import PathLike
+from pathlib import Path
+from typing import Any, Dict, Optional, Union
+
+from azure.ai.ml._restclient.v2023_04_01_preview.models import (
+    FlavorData,
+    ModelContainer,
+    ModelVersion,
+    ModelVersionProperties,
+)
+from azure.ai.ml._schema import ModelSchema
+from azure.ai.ml._utils._arm_id_utils import AMLNamedArmId, AMLVersionedArmId
+from azure.ai.ml._utils._asset_utils import get_ignore_file, get_object_hash
+from azure.ai.ml.constants._common import (
+    BASE_PATH_CONTEXT_KEY,
+    LONG_URI_FORMAT,
+    PARAMS_OVERRIDE_KEY,
+    ArmConstants,
+    AssetTypes,
+)
+from azure.ai.ml.entities._assets import Artifact
+from azure.ai.ml.entities._assets.intellectual_property import IntellectualProperty
+from azure.ai.ml.entities._system_data import SystemData
+from azure.ai.ml.entities._util import get_md5_string, load_from_dict
+
+from .artifact import ArtifactStorageInfo
+
+
+class Model(Artifact):  # pylint: disable=too-many-instance-attributes
+    """Model for training and scoring.
+
+    :param name: The name of the model. Defaults to a random GUID.
+    :type name: Optional[str]
+    :param version: The version of the model. Defaults to "1" if either no name or an unregistered name is provided.
+        Otherwise, defaults to autoincrement from the last registered version of the model with that name.
+    :type version: Optional[str]
+    :param type: The storage format for this entity, used for no-code deployment (NCD). Accepted values are
+        "custom_model", "mlflow_model", or "triton_model". Defaults to "custom_model".
+    :type type: Optional[str]
+    :param utc_time_created: The date and time when the model was created, in
+        UTC ISO 8601 format. (e.g. '2020-10-19 17:44:02.096572').
+    :type utc_time_created: Optional[str]
+    :param flavors: The flavors in which the model can be interpreted. Defaults to None.
+    :type flavors: Optional[dict[str, Any]]
+    :param path: A remote uri or a local path pointing to a model. Defaults to None.
+    :type path: Optional[str]
+    :param description: The description of the resource. Defaults to None
+    :type description: Optional[str]
+    :param tags: Tag dictionary. Tags can be added, removed, and updated. Defaults to None.
+    :type tags: Optional[dict[str, str]]
+    :param properties: The asset property dictionary. Defaults to None.
+    :type properties: Optional[dict[str, str]]
+    :param stage: The stage of the resource. Defaults to None.
+    :type stage: Optional[str]
+    :param kwargs: A dictionary of additional configuration parameters.
+    :type kwargs: Optional[dict]
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_misc.py
+            :start-after: [START model_entity_create]
+            :end-before: [END model_entity_create]
+            :language: python
+            :dedent: 8
+            :caption: Creating a Model object.
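+
+    A minimal construction sketch (the path and tag values are illustrative placeholders; assumes
+    Model and AssetTypes are importable from their public namespaces):
+
+    .. code-block:: python
+
+        from azure.ai.ml.constants import AssetTypes
+        from azure.ai.ml.entities import Model
+
+        model = Model(
+            name="churn-classifier",
+            version="1",
+            type=AssetTypes.CUSTOM_MODEL,
+            path="./models/churn",
+            tags={"framework": "sklearn"},
+        )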
+    """
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        type: Optional[str] = None,  # pylint: disable=redefined-builtin
+        path: Optional[Union[str, PathLike]] = None,
+        utc_time_created: Optional[str] = None,
+        flavors: Optional[Dict[str, Dict[str, Any]]] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict] = None,
+        properties: Optional[Dict] = None,
+        stage: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
+        self.job_name = kwargs.pop("job_name", None)
+        self._intellectual_property = kwargs.pop("intellectual_property", None)
+        self._system_metadata = kwargs.pop("system_metadata", None)
+        super().__init__(
+            name=name,
+            version=version,
+            path=path,
+            description=description,
+            tags=tags,
+            properties=properties,
+            **kwargs,
+        )
+        self.utc_time_created = utc_time_created
+        self.flavors = dict(flavors) if flavors else None
+        self._arm_type = ArmConstants.MODEL_VERSION_TYPE
+        self.type = type or AssetTypes.CUSTOM_MODEL
+        self.stage = stage
+        if self._is_anonymous and self.path:
+            _ignore_file = get_ignore_file(self.path)
+            _upload_hash = get_object_hash(self.path, _ignore_file)
+            self.name = get_md5_string(_upload_hash)
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "Model":
+        params_override = params_override or []
+        data = data or {}
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        res: Model = load_from_dict(ModelSchema, data, context, **kwargs)
+        return res
+
+    def _to_dict(self) -> Dict:
+        return dict(ModelSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self))
+
+    @classmethod
+    def _from_rest_object(cls, model_rest_object: ModelVersion) -> "Model":
+        rest_model_version: ModelVersionProperties = model_rest_object.properties
+        arm_id = AMLVersionedArmId(arm_id=model_rest_object.id)
+        model_stage = rest_model_version.stage if hasattr(rest_model_version, "stage") else None
+        model_system_metadata = (
+            rest_model_version.system_metadata if hasattr(rest_model_version, "system_metadata") else None
+        )
+        flavors = None
+        if hasattr(rest_model_version, "flavors") and rest_model_version.flavors:
+            flavors = {key: flavor.data for key, flavor in rest_model_version.flavors.items()}
+        model = Model(
+            id=model_rest_object.id,
+            name=arm_id.asset_name,
+            version=arm_id.asset_version,
+            path=rest_model_version.model_uri,
+            description=rest_model_version.description,
+            tags=rest_model_version.tags,
+            flavors=flavors,
+            properties=rest_model_version.properties,
+            stage=model_stage,
+            # pylint: disable=protected-access
+            creation_context=SystemData._from_rest_object(model_rest_object.system_data),
+            type=rest_model_version.model_type,
+            job_name=rest_model_version.job_name,
+            intellectual_property=(
+                IntellectualProperty._from_rest_object(rest_model_version.intellectual_property)
+                if rest_model_version.intellectual_property
+                else None
+            ),
+            system_metadata=model_system_metadata,
+        )
+        return model
+
+    @classmethod
+    def _from_container_rest_object(cls, model_container_rest_object: ModelContainer) -> "Model":
+        model = Model(
+            name=model_container_rest_object.name,
+            version="1",
+            id=model_container_rest_object.id,
+            # pylint: disable=protected-access
+            creation_context=SystemData._from_rest_object(model_container_rest_object.system_data),
+        )
+        model.latest_version = model_container_rest_object.properties.latest_version
+
+        # Setting version to None since if version is not provided it is defaulted to "1".
+        # This should go away once container concept is finalized.
+        model.version = None
+        return model
+
+    def _to_rest_object(self) -> ModelVersion:
+        model_version = ModelVersionProperties(
+            description=self.description,
+            tags=self.tags,
+            properties=self.properties,
+            flavors=(
+                {key: FlavorData(data=dict(value)) for key, value in self.flavors.items()} if self.flavors else None
+            ),  # flatten OrderedDict to dict
+            model_type=self.type,
+            model_uri=self.path,
+            stage=self.stage,
+            is_anonymous=self._is_anonymous,
+        )
+        model_version.system_metadata = self._system_metadata if hasattr(self, "_system_metadata") else None
+
+        model_version_resource = ModelVersion(properties=model_version)
+
+        return model_version_resource
+
+    def _update_path(self, asset_artifact: ArtifactStorageInfo) -> None:
+        # datastore_arm_id is null for registry scenario, so capture the full_storage_path
+        if not asset_artifact.datastore_arm_id and asset_artifact.full_storage_path:
+            self.path = asset_artifact.full_storage_path
+        else:
+            aml_datastore_id = AMLNamedArmId(asset_artifact.datastore_arm_id)
+            self.path = LONG_URI_FORMAT.format(
+                aml_datastore_id.subscription_id,
+                aml_datastore_id.resource_group_name,
+                aml_datastore_id.workspace_name,
+                aml_datastore_id.asset_name,
+                asset_artifact.relative_path,
+            )
+
+    def _to_arm_resource_param(self, **kwargs: Any) -> Dict:  # pylint: disable=unused-argument
+        properties = self._to_rest_object().properties
+
+        return {
+            self._arm_type: {
+                ArmConstants.NAME: self.name,
+                ArmConstants.VERSION: self.version,
+                ArmConstants.PROPERTIES_PARAMETER_NAME: self._serialize.body(properties, "ModelVersionProperties"),
+            }
+        }
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/asset.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/asset.py
new file mode 100644
index 00000000..b6ee2b55
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/asset.py
@@ -0,0 +1,145 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import uuid
+from abc import abstractmethod
+from os import PathLike
+from typing import IO, Any, AnyStr, Dict, Optional, Union
+
+from azure.ai.ml._exception_helper import log_and_raise_error
+from azure.ai.ml._utils.utils import dump_yaml_to_file
+from azure.ai.ml.entities._resource import Resource
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationErrorType, ValidationException
+
+
+class Asset(Resource):
+    """Base class for asset.
+
+    This class should not be instantiated directly. Instead, use one of its subclasses.
+
+    :param name: The name of the asset. Defaults to a random GUID.
+    :type name: Optional[str]
+    :param version: The version of the asset. Defaults to "1" if no name is provided, otherwise defaults to
+        autoincrement from the last registered version of the asset with that name. For a model name that has
+        never been registered, a default version will be assigned.
+    :type version: Optional[str]
+    :param description: The description of the resource. Defaults to None.
+    :type description: Optional[str]
+    :param tags: Tag dictionary. Tags can be added, removed, and updated. Defaults to None.
+    :type tags: Optional[dict[str, str]]
+    :param properties: The asset property dictionary. Defaults to None.
+    :type properties: Optional[dict[str, str]]
+    :keyword kwargs: A dictionary of additional configuration parameters.
+    :paramtype kwargs: Optional[dict]
+    """
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict] = None,
+        properties: Optional[Dict] = None,
+        **kwargs: Any,
+    ) -> None:
+        self._is_anonymous = kwargs.pop("is_anonymous", False)
+        self._auto_increment_version = kwargs.pop("auto_increment_version", False)
+        self.auto_delete_setting = kwargs.pop("auto_delete_setting", None)
+
+        if not name and version is None:
+            name = _get_random_name()
+            version = "1"
+            self._is_anonymous = True
+        elif version is not None and not name:
+            msg = "If version is specified, name must be specified also."
+            err = ValidationException(
+                message=msg,
+                target=ErrorTarget.ASSET,
+                no_personal_data_message=msg,
+                error_category=ErrorCategory.USER_ERROR,
+                error_type=ValidationErrorType.MISSING_FIELD,
+            )
+            log_and_raise_error(err)
+
+        super().__init__(
+            name=name,
+            description=description,
+            tags=tags,
+            properties=properties,
+            **kwargs,
+        )
+
+        self.version = version
+        self.latest_version = None
+
+    @abstractmethod
+    def _to_dict(self) -> Dict:
+        """Dump the artifact content into a pure dict object."""
+
+    @property
+    def version(self) -> Optional[str]:
+        """The asset version.
+
+        :return: The asset version.
+        :rtype: str
+        """
+        return self._version
+
+    @version.setter
+    def version(self, value: str) -> None:
+        """Sets the asset version.
+
+        :param value: The asset version.
+        :type value: str
+        :raises ValidationException: Raised if value is not a string.
+        """
+        if value:
+            if not isinstance(value, str):
+                msg = f"Asset version must be a string, not type {type(value)}."
+                err = ValidationException(
+                    message=msg,
+                    target=ErrorTarget.ASSET,
+                    no_personal_data_message=msg,
+                    error_category=ErrorCategory.USER_ERROR,
+                    error_type=ValidationErrorType.INVALID_VALUE,
+                )
+                log_and_raise_error(err)
+
+        self._version = value
+        self._auto_increment_version = self.name and not self._version
+
+    def dump(self, dest: Union[str, PathLike, IO[AnyStr]], **kwargs: Any) -> None:
+        """Dump the asset content into a file in YAML format.
+
+        :param dest: The local path or file stream to write the YAML content to.
+            If dest is a file path, a new file will be created.
+            If dest is an open file, the file will be written to directly.
+        :type dest: Union[PathLike, str, IO[AnyStr]]
+        :raises FileExistsError: Raised if dest is a file path and the file already exists.
+        :raises IOError: Raised if dest is an open file and the file is not writable.
+        """
+        path = kwargs.pop("path", None)
+        yaml_serialized = self._to_dict()
+        dump_yaml_to_file(dest, yaml_serialized, default_flow_style=False, path=path, **kwargs)
+
+    def __eq__(self, other: Any) -> bool:
+        return bool(
+            self.name == other.name
+            and self.id == other.id
+            and self.version == other.version
+            and self.description == other.description
+            and self.tags == other.tags
+            and self.properties == other.properties
+            and self.base_path == other.base_path
+            and self._is_anonymous == other._is_anonymous
+            and self._auto_increment_version == other._auto_increment_version
+            and self.auto_delete_setting == other.auto_delete_setting
+        )
+
+    def __ne__(self, other: Any) -> bool:
+        return not self.__eq__(other)
+
+
+def _get_random_name() -> str:
+    return str(uuid.uuid4())
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/auto_delete_setting.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/auto_delete_setting.py
new file mode 100644
index 00000000..ea6bf9e8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/auto_delete_setting.py
@@ -0,0 +1,42 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import Any, Union
+
+from azure.ai.ml._restclient.v2023_04_01_preview.models import AutoDeleteSetting as RestAutoDeleteSetting
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._common import AutoDeleteCondition
+from azure.ai.ml.entities._mixins import DictMixin
+
+
+@experimental
+class AutoDeleteSetting(DictMixin):
+    """Class which defines the auto delete setting.
+    :param condition: When to check if an asset is expired.
+     Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan".
+    :type condition: AutoDeleteCondition
+    :param value: Expiration condition value.
+    :type value: str
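+
+    A minimal construction sketch (the value string is an illustrative placeholder; assumes the
+    class and AutoDeleteCondition are importable from their public namespaces):
+
+    .. code-block:: python
+
+        from azure.ai.ml.constants import AutoDeleteCondition
+        from azure.ai.ml.entities import AutoDeleteSetting
+
+        setting = AutoDeleteSetting(
+            condition=AutoDeleteCondition.CREATED_GREATER_THAN,
+            value="30d",
+        )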
+    """
+
+    def __init__(
+        self,
+        *,
+        condition: AutoDeleteCondition = AutoDeleteCondition.CREATED_GREATER_THAN,
+        value: Union[str, None] = None
+    ):
+        self.condition = condition
+        self.value = value
+
+    def _to_rest_object(self) -> RestAutoDeleteSetting:
+        return RestAutoDeleteSetting(condition=self.condition, value=self.value)
+
+    @classmethod
+    def _from_rest_object(cls, obj: RestAutoDeleteSetting) -> "AutoDeleteSetting":
+        return cls(condition=obj.condition, value=obj.value)
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, AutoDeleteSetting):
+            return NotImplemented
+        return self.condition == other.condition and self.value == other.value
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/environment.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/environment.py
new file mode 100644
index 00000000..865273fb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/environment.py
@@ -0,0 +1,478 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access, too-many-instance-attributes
+
+import os
+from pathlib import Path
+from typing import Any, Dict, Optional, Union
+
+import yaml  # type: ignore[import]
+
+from azure.ai.ml._exception_helper import log_and_raise_error
+from azure.ai.ml._restclient.v2023_04_01_preview.models import BuildContext as RestBuildContext
+from azure.ai.ml._restclient.v2023_04_01_preview.models import (
+    EnvironmentContainer,
+    EnvironmentVersion,
+    EnvironmentVersionProperties,
+)
+from azure.ai.ml._schema import EnvironmentSchema
+from azure.ai.ml._utils._arm_id_utils import AMLVersionedArmId
+from azure.ai.ml._utils._asset_utils import get_ignore_file, get_object_hash
+from azure.ai.ml._utils.utils import dump_yaml, is_url, load_file, load_yaml
+from azure.ai.ml.constants._common import ANONYMOUS_ENV_NAME, BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY, ArmConstants
+from azure.ai.ml.entities._assets.asset import Asset
+from azure.ai.ml.entities._assets.intellectual_property import IntellectualProperty
+from azure.ai.ml.entities._mixins import LocalizableMixin
+from azure.ai.ml.entities._system_data import SystemData
+from azure.ai.ml.entities._util import get_md5_string, load_from_dict
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationErrorType, ValidationException
+
+
+class BuildContext:
+    """Docker build context for Environment.
+
+    :param path: The local or remote path to the docker build context directory.
+    :type path: Union[str, os.PathLike]
+    :param dockerfile_path: The path to the dockerfile relative to root of docker build context directory.
+    :type dockerfile_path: str
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_misc.py
+            :start-after: [START build_context_entity_create]
+            :end-before: [END build_context_entity_create]
+            :language: python
+            :dedent: 8
+            :caption: Create a Build Context object.
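+
+    A minimal construction sketch (the directory and file names are illustrative placeholders;
+    assumes BuildContext is importable from azure.ai.ml.entities):
+
+    .. code-block:: python
+
+        from azure.ai.ml.entities import BuildContext
+
+        build = BuildContext(path="./docker-build-context", dockerfile_path="Dockerfile")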
+    """
+
+    def __init__(
+        self,
+        *,
+        dockerfile_path: Optional[str] = None,
+        path: Optional[Union[str, os.PathLike]] = None,
+    ):
+        self.dockerfile_path = dockerfile_path
+        self.path = path
+
+    def _to_rest_object(self) -> RestBuildContext:
+        return RestBuildContext(context_uri=self.path, dockerfile_path=self.dockerfile_path)
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestBuildContext) -> "BuildContext":
+        return BuildContext(
+            path=rest_obj.context_uri,
+            dockerfile_path=rest_obj.dockerfile_path,
+        )
+
+    def __eq__(self, other: Any) -> bool:
+        res: bool = self.dockerfile_path == other.dockerfile_path and self.path == other.path
+        return res
+
+    def __ne__(self, other: Any) -> bool:
+        return not self.__eq__(other)
+
+
+class Environment(Asset, LocalizableMixin):
+    """Environment for training.
+
+    :param name: Name of the resource.
+    :type name: str
+    :param version: Version of the asset.
+    :type version: str
+    :param description: Description of the resource.
+    :type description: str
+    :param image: URI of a custom base image.
+    :type image: str
+    :param build: Docker build context to create the environment. Mutually exclusive with "image"
+    :type build: ~azure.ai.ml.entities._assets.environment.BuildContext
+    :param conda_file: Path to configuration file listing conda packages to install.
+    :type conda_file: typing.Union[str, os.PathLike]
+    :param tags: Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param datastore: The datastore to upload the local artifact to.
+    :type datastore: str
+    :param kwargs: A dictionary of additional configuration parameters.
+    :type kwargs: dict
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_misc.py
+            :start-after: [START env_entity_create]
+            :end-before: [END env_entity_create]
+            :language: python
+            :dedent: 8
+            :caption: Create an Environment object.
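+
+    A minimal construction sketch (the image and conda file are illustrative placeholders; assumes
+    Environment is importable from azure.ai.ml.entities):
+
+    .. code-block:: python
+
+        from azure.ai.ml.entities import Environment
+
+        env = Environment(
+            name="training-env",
+            version="1",
+            image="mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu20.04:latest",
+            conda_file="./environment/conda.yaml",
+            description="Training environment with pinned conda dependencies.",
+        )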
+    """
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        description: Optional[str] = None,
+        image: Optional[str] = None,
+        build: Optional[BuildContext] = None,
+        conda_file: Optional[Union[str, os.PathLike, Dict]] = None,
+        tags: Optional[Dict] = None,
+        properties: Optional[Dict] = None,
+        datastore: Optional[str] = None,
+        **kwargs: Any,
+    ):
+        self._arm_type: str = ""
+        self.latest_version: str = ""  # type: ignore[assignment]
+        self.image: Optional[str] = None
+        inference_config = kwargs.pop("inference_config", None)
+        os_type = kwargs.pop("os_type", None)
+        self._intellectual_property = kwargs.pop("intellectual_property", None)
+
+        super().__init__(
+            name=name,
+            version=version,
+            description=description,
+            tags=tags,
+            properties=properties,
+            **kwargs,
+        )
+
+        self.conda_file = conda_file
+        self.image = image
+        self.build = build
+        self.inference_config = inference_config
+        self.os_type = os_type
+        self._arm_type = ArmConstants.ENVIRONMENT_VERSION_TYPE
+        self._conda_file_path = (
+            _resolve_path(base_path=self.base_path, input=conda_file)
+            if isinstance(conda_file, (os.PathLike, str))
+            else None
+        )
+        self.path = None
+        self.datastore = datastore
+        self._upload_hash = None
+
+        self._translated_conda_file = None
+        if self.conda_file:
+            self._translated_conda_file = dump_yaml(self.conda_file, sort_keys=True)  # service needs str representation
+
+        if self.build and self.build.path and not is_url(self.build.path):
+            path = Path(self.build.path)
+            if not path.is_absolute():
+                path = Path(self.base_path, path).resolve()
+            self.path = path
+
+        if self._is_anonymous:
+            if self.path:
+                self._ignore_file = get_ignore_file(path)
+                self._upload_hash = get_object_hash(path, self._ignore_file)
+                self._generate_anonymous_name_version(source="build")
+            elif self.image:
+                self._generate_anonymous_name_version(
+                    source="image", conda_file=self._translated_conda_file, inference_config=self.inference_config
+                )
+
+    @property
+    def conda_file(self) -> Optional[Union[str, os.PathLike, Dict]]:
+        """Conda environment specification.
+
+        :return: Conda dependencies loaded from `conda_file` param.
+        :rtype: Optional[Union[str, os.PathLike]]
+        """
+        return self._conda_file
+
+    @conda_file.setter
+    def conda_file(self, value: Optional[Union[str, os.PathLike, Dict]]) -> None:
+        """Set conda environment specification.
+
+        :param value: A path to a local conda dependencies yaml file or a loaded yaml dictionary of dependencies.
+        :type value: Union[str, os.PathLike, Dict]
+        :return: None
+        """
+        if not isinstance(value, Dict):
+            value = _deserialize(self.base_path, value, is_conda=True)
+        self._conda_file = value
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[dict] = None,
+        yaml_path: Optional[Union[os.PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "Environment":
+        params_override = params_override or []
+        data = data or {}
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        res: Environment = load_from_dict(EnvironmentSchema, data, context, **kwargs)
+        return res
+
+    def _to_rest_object(self) -> EnvironmentVersion:
+        self.validate()
+        environment_version = EnvironmentVersionProperties()
+        if self.conda_file:
+            environment_version.conda_file = self._translated_conda_file
+        if self.image:
+            environment_version.image = self.image
+        if self.build:
+            environment_version.build = self.build._to_rest_object()
+        if self.os_type:
+            environment_version.os_type = self.os_type
+        if self.tags:
+            environment_version.tags = self.tags
+        if self._is_anonymous:
+            environment_version.is_anonymous = self._is_anonymous
+        if self.inference_config:
+            environment_version.inference_config = self.inference_config
+        if self.description:
+            environment_version.description = self.description
+        if self.properties:
+            environment_version.properties = self.properties
+
+        environment_version_resource = EnvironmentVersion(properties=environment_version)
+
+        return environment_version_resource
+
+    @classmethod
+    def _from_rest_object(cls, env_rest_object: EnvironmentVersion) -> "Environment":
+        rest_env_version = env_rest_object.properties
+        arm_id = AMLVersionedArmId(arm_id=env_rest_object.id)
+
+        environment = Environment(
+            id=env_rest_object.id,
+            name=arm_id.asset_name,
+            version=arm_id.asset_version,
+            description=rest_env_version.description,
+            tags=rest_env_version.tags,
+            creation_context=(
+                SystemData._from_rest_object(env_rest_object.system_data) if env_rest_object.system_data else None
+            ),
+            is_anonymous=rest_env_version.is_anonymous,
+            image=rest_env_version.image,
+            os_type=rest_env_version.os_type,
+            inference_config=rest_env_version.inference_config,
+            build=BuildContext._from_rest_object(rest_env_version.build) if rest_env_version.build else None,
+            properties=rest_env_version.properties,
+            intellectual_property=(
+                IntellectualProperty._from_rest_object(rest_env_version.intellectual_property)
+                if rest_env_version.intellectual_property
+                else None
+            ),
+        )
+
+        if rest_env_version.conda_file:
+            translated_conda_file = yaml.safe_load(rest_env_version.conda_file)
+            environment.conda_file = translated_conda_file
+            environment._translated_conda_file = rest_env_version.conda_file
+
+        return environment
+
+    @classmethod
+    def _from_container_rest_object(cls, env_container_rest_object: EnvironmentContainer) -> "Environment":
+        env = Environment(
+            name=env_container_rest_object.name,
+            version="1",
+            id=env_container_rest_object.id,
+            creation_context=SystemData._from_rest_object(env_container_rest_object.system_data),
+        )
+        env.latest_version = env_container_rest_object.properties.latest_version
+
+        # Setting version to None since if version is not provided it is defaulted to "1".
+        # This should go away once container concept is finalized.
+        env.version = None
+        return env
+
+    def _to_arm_resource_param(self, **kwargs: Any) -> Dict:  # pylint: disable=unused-argument
+        properties = self._to_rest_object().properties
+
+        return {
+            self._arm_type: {
+                ArmConstants.NAME: self.name,
+                ArmConstants.VERSION: self.version,
+                ArmConstants.PROPERTIES_PARAMETER_NAME: self._serialize.body(properties, "EnvironmentVersion"),
+            }
+        }
+
+    def _to_dict(self) -> Dict:
+        res: dict = EnvironmentSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+        return res
+
+    def validate(self) -> None:
+        """Validate the environment by checking its name, image and build
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/ml_samples_misc.py
+                :start-after: [START env_entities_validate]
+                :end-before: [END env_entities_validate]
+                :language: python
+                :dedent: 8
+                :caption: Validate environment example.
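+
+        A minimal call sketch (placeholder values; per the checks below, validation fails when the
+        name is missing, when neither image nor build is set, or when both are set):
+
+        .. code-block:: python
+
+            env = Environment(name="training-env", image="python:3.10-slim")
+            env.validate()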
+        """
+
+        if self.name is None:
+            msg = "Environment name is required"
+            err = ValidationException(
+                message=msg,
+                target=ErrorTarget.ENVIRONMENT,
+                no_personal_data_message=msg,
+                error_category=ErrorCategory.USER_ERROR,
+                error_type=ValidationErrorType.MISSING_FIELD,
+            )
+            log_and_raise_error(err)
+        if self.image is None and self.build is None:
+            msg = "Docker image or Dockerfile is required for environments"
+            err = ValidationException(
+                message=msg,
+                target=ErrorTarget.ENVIRONMENT,
+                no_personal_data_message=msg,
+                error_category=ErrorCategory.USER_ERROR,
+                error_type=ValidationErrorType.MISSING_FIELD,
+            )
+            log_and_raise_error(err)
+        if self.image and self.build:
+            msg = "Docker image or Dockerfile should be provided not both"
+            err = ValidationException(
+                message=msg,
+                target=ErrorTarget.ENVIRONMENT,
+                no_personal_data_message=msg,
+                error_category=ErrorCategory.USER_ERROR,
+                error_type=ValidationErrorType.INVALID_VALUE,
+            )
+            log_and_raise_error(err)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Environment):
+            return NotImplemented
+        return (
+            self.name == other.name
+            and self.id == other.id
+            and self.version == other.version
+            and self.description == other.description
+            and self.tags == other.tags
+            and self.properties == other.properties
+            and self.base_path == other.base_path
+            and self.image == other.image
+            and self.build == other.build
+            and self.conda_file == other.conda_file
+            and self.inference_config == other.inference_config
+            and self._is_anonymous == other._is_anonymous
+            and self.os_type == other.os_type
+            and self._intellectual_property == other._intellectual_property
+        )
+
+    def __ne__(self, other: object) -> bool:
+        return not self.__eq__(other)
+
+    def _generate_anonymous_name_version(
+        self, source: str, conda_file: Optional[str] = None, inference_config: Optional[Dict] = None
+    ) -> None:
+        hash_str = ""
+        if source == "image":
+            hash_str = hash_str.join(get_md5_string(self.image))
+            if inference_config:
+                hash_str = hash_str.join(get_md5_string(yaml.dump(inference_config, sort_keys=True)))
+            if conda_file:
+                hash_str = hash_str.join(get_md5_string(conda_file))
+        if source == "build":
+            if self.build is not None and not self.build.dockerfile_path:
+                hash_str = hash_str.join(get_md5_string(self._upload_hash))
+            else:
+                if self.build is not None:
+                    hash_str = hash_str.join(get_md5_string(self._upload_hash)).join(
+                        get_md5_string(self.build.dockerfile_path)
+                    )
+        version_hash = get_md5_string(hash_str)
+        self.version = version_hash
+        self.name = ANONYMOUS_ENV_NAME
+
+    def _localize(self, base_path: str) -> None:
+        """Called on an asset got from service to clean up remote attributes like id, creation_context, etc. and update
+        base_path.
+
+        :param base_path: The base path
+        :type base_path: str
+        """
+        if not getattr(self, "id", None):
+            raise ValueError("Only remote asset can be localize but got a {} without id.".format(type(self)))
+        self._id = None
+        self._creation_context = None
+        self._base_path = base_path
+        if self._is_anonymous:
+            self.name, self.version = None, None
+
+
+# TODO: Remove _DockerBuild and _DockerConfiguration classes once local endpoint moves to using updated env
+class _DockerBuild:
+    """Helper class to encapsulate Docker build info for Environment."""
+
+    def __init__(
+        self,
+        base_path: Optional[Union[str, os.PathLike]] = None,
+        dockerfile: Optional[str] = None,
+    ):
+        self.dockerfile = _deserialize(base_path, dockerfile)
+
+    @classmethod
+    def _to_rest_object(cls) -> None:
+        return None
+
+    def _from_rest_object(self, rest_obj: Any) -> None:
+        self.dockerfile = rest_obj.dockerfile
+
+    def __eq__(self, other: Any) -> bool:
+        res: bool = self.dockerfile == other.dockerfile
+        return res
+
+    def __ne__(self, other: Any) -> bool:
+        return not self.__eq__(other)
+
+
+def _deserialize(
+    base_path: Optional[Union[str, os.PathLike]],
+    input: Optional[Union[str, os.PathLike, Dict]],  # pylint: disable=redefined-builtin
+    is_conda: bool = False,
+) -> Optional[Union[str, os.PathLike, Dict]]:
+    """Deserialize user input files for conda and docker.
+
+    :param base_path: The base path for all files supplied by user.
+    :type base_path: Union[str, os.PathLike]
+    :param input: Input to be deserialized. Will be either dictionary of file contents or path to file.
+    :type input: Union[str, os.PathLike, Dict[str, str]]
+    :param is_conda: If file is conda file, it will be returned as dictionary
+    :type is_conda: bool
+    :return: The deserialized data
+    :rtype: Union[str, Dict]
+    """
+
+    if input:
+        path = _resolve_path(base_path=base_path, input=input)
+        data: Union[str, Dict] = ""
+        if is_conda:
+            data = load_yaml(path)
+        else:
+            data = load_file(path)
+        return data
+    return input
+
+
+def _resolve_path(base_path: Any, input: Any) -> Path:  # pylint: disable=redefined-builtin
+    """Deserialize user input files for conda and docker.
+
+    :param base_path: The base path for all files supplied by user.
+    :type base_path: Union[str, os.PathLike]
+    :param input: Input to be deserialized. Will be either dictionary of file contents or path to file.
+    :type input: Union[str, os.PathLike, Dict[str, str]]
+    :return: The resolved path
+    :rtype: Path
+    """
+
+    path = Path(input)
+    if not path.is_absolute():
+        path = Path(base_path, path).resolve()
+    return path
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/federated_learning_silo.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/federated_learning_silo.py
new file mode 100644
index 00000000..8255f887
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/federated_learning_silo.py
@@ -0,0 +1,123 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+# TODO determine where this file should live.
+
+from os import PathLike
+from typing import IO, Any, AnyStr, Dict, List, Optional, Union
+
+from azure.ai.ml import Input
+from azure.ai.ml._utils.utils import dump_yaml_to_file, load_yaml
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY
+
+
+# Entity representation of a federated learning silo.
+# Used by Federated Learning DSL nodes as inputs for creating
+# FL subgraphs in pipelines.
+# The functionality of this entity is limited, and it exists mostly
+# To simplify the process of loading and validating these objects from YAML.
+class FederatedLearningSilo:
+    def __init__(
+        self,
+        *,
+        compute: str,
+        datastore: str,
+        inputs: Dict[str, Input],
+    ):
+        """
+        A pseudo-entity that represents a federated learning silo, which is an isolated compute with its own
+        datastore and input targets. This is meant to be used in conjunction with the
+        Federated Learning DSL node to create federated learning pipelines. This does NOT represent any specific
+        AML resource, and is instead merely meant to simplify client-side experiences with managing FL data distribution.
+        Standard usage involves the "load_list" classmethod to load a list of these objects from YAML, which serves
+        as a necessary input for FL processes.
+
+
+        :param compute: The resource id of a compute.
+        :type compute: str
+        :param datastore: The resource id of a datastore.
+        :type datastore: str
+        :param inputs: A dictionary of input entities that exist in the previously specified datastore.
+            The keys of this dictionary are the keyword names that these inputs should be entered into.
+        :type inputs: dict[str, Input]
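+
+        A minimal construction sketch (the resource IDs and data path are shortened placeholders):
+
+        .. code-block:: python
+
+            from azure.ai.ml import Input
+
+            silo = FederatedLearningSilo(
+                compute="<silo compute resource id>",
+                datastore="<silo datastore resource id>",
+                inputs={"silo_data": Input(type="uri_folder", path="azureml://datastores/silo_store/paths/data/")},
+            )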
+        """
+        self.compute = compute
+        self.datastore = datastore
+        self.inputs = inputs
+
+    def dump(
+        self,
+        dest: Union[str, PathLike, IO[AnyStr]],
+        # pylint: disable=unused-argument
+        **kwargs: Any,
+    ) -> None:
+        """Dump the Federated Learning Silo spec into a file in yaml format.
+
+        :param dest: Either
+          * A path to a local file
+          * A writeable file-like object
+        :type dest: Union[str, PathLike, IO[AnyStr]]
+        """
+        yaml_serialized = self._to_dict()
+        dump_yaml_to_file(dest, yaml_serialized, default_flow_style=False)
+
+    def _to_dict(self) -> Dict:
+        # JIT import to avoid experimental warnings on unrelated calls
+        from azure.ai.ml._schema.assets.federated_learning_silo import FederatedLearningSiloSchema
+
+        schema = FederatedLearningSiloSchema(context={BASE_PATH_CONTEXT_KEY: "./"})
+
+        return dict(schema.dump(self))
+
+    @classmethod
+    def _load_from_dict(cls, silo_dict: dict) -> "FederatedLearningSilo":
+        data_input = silo_dict.get("inputs", {})
+        return FederatedLearningSilo(compute=silo_dict["compute"], datastore=silo_dict["datastore"], inputs=data_input)
+
+    # simple load based off mltable metadata loading style
+    @classmethod
+    def _load(
+        cls,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+    ) -> "FederatedLearningSilo":
+        yaml_dict = load_yaml(yaml_path)
+        return FederatedLearningSilo._load_from_dict(silo_dict=yaml_dict)
+
+    @classmethod
+    def load_list(
+        cls,
+        *,
+        yaml_path: Optional[Union[PathLike, str]],
+        list_arg: str,
+    ) -> List["FederatedLearningSilo"]:
+        """
+        Loads a list of federated learning silos from YAML. This is the expected entry point
+        for this class; load a list of these, then supply them to the federated learning DSL
+        package node in order to produce an FL pipeline.
+
+        The structure of the supplied YAML file is assumed to be a list of FL silos under the
+        name specified by the list_arg input, as shown below.
+
+        list_arg:
+        - silo 1 ...
+        - silo 2 ...
+
+        :keyword yaml_path: A path leading to a local YAML file which contains a list of
+            FederatedLearningSilo objects.
+        :paramtype yaml_path: Optional[Union[PathLike, str]]
+        :keyword list_arg: A string that names the top-level value which contains the list
+            of FL silos.
+        :paramtype list_arg: str
+        :return: The list of federated learning silos
+        :rtype: List[FederatedLearningSilo]
+        """
+        yaml_dict = load_yaml(yaml_path)
+        return [
+            FederatedLearningSilo._load_from_dict(silo_dict=silo_yaml_dict) for silo_yaml_dict in yaml_dict[list_arg]
+        ]
+
+    # There are no to/from rest object functions because this object has no
+    # rest object equivalent. Any conversions should be done as part of the
+    # to/from rest object functions of OTHER entity objects.
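A minimal usage sketch of the "load_list" entry point documented above. The silos.yml file name, the "silos" top-level key, and the placeholder resource ids are illustrative only and are not defined by this module.

    # Illustrative silos.yml:
    #
    # silos:
    # - compute: <compute resource id>
    #   datastore: <datastore resource id>
    #   inputs:
    #     silo_data:
    #       type: uri_folder
    #       path: azureml://datastores/<datastore name>/paths/silo1/
    from azure.ai.ml.entities._assets.federated_learning_silo import FederatedLearningSilo

    silos = FederatedLearningSilo.load_list(yaml_path="silos.yml", list_arg="silos")
    for silo in silos:
        # Each entry carries the compute, datastore, and named inputs of one silo.
        print(silo.compute, silo.datastore, sorted(silo.inputs))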
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/intellectual_property.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/intellectual_property.py
new file mode 100644
index 00000000..58b96a1b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/intellectual_property.py
@@ -0,0 +1,49 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import Any, Optional
+
+from azure.ai.ml._restclient.v2023_04_01_preview.models import IntellectualProperty as RestIntellectualProperty
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._assets import IPProtectionLevel
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+
+
+@experimental
+class IntellectualProperty(RestTranslatableMixin):
+    """Intellectual property settings definition.
+
+    :keyword publisher: The publisher's name.
+    :paramtype publisher: Optional[str]
+    :keyword protection_level: Asset Protection Level. Accepted values are IPProtectionLevel.ALL ("all") and
+        IPProtectionLevel.NONE ("none"). Defaults to IPProtectionLevel.ALL ("all").
+    :paramtype protection_level: Optional[Union[str, ~azure.ai.ml.constants.IPProtectionLevel]]
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_misc.py
+            :start-after: [START intellectual_property_configuration]
+            :end-before: [END intellectual_property_configuration]
+            :language: python
+            :dedent: 8
+            :caption: Configuring intellectual property settings on a CommandComponent.
+    """
+
+    def __init__(
+        self, *, publisher: Optional[str] = None, protection_level: IPProtectionLevel = IPProtectionLevel.ALL
+    ) -> None:
+        self.publisher = publisher
+        self.protection_level = protection_level
+
+    def _to_rest_object(self) -> RestIntellectualProperty:
+        return RestIntellectualProperty(publisher=self.publisher, protection_level=self.protection_level)
+
+    @classmethod
+    def _from_rest_object(cls, obj: RestIntellectualProperty) -> "IntellectualProperty":
+        return cls(publisher=obj.publisher, protection_level=obj.protection_level)
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, IntellectualProperty):
+            return NotImplemented
+        return self.publisher == other.publisher and self.protection_level == other.protection_level
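A minimal sketch of constructing the experimental IntellectualProperty settings defined above (separate from the shipped ml_samples_misc.py sample referenced in the docstring); the publisher name is a placeholder.

    from azure.ai.ml.constants._assets import IPProtectionLevel
    from azure.ai.ml.entities._assets.intellectual_property import IntellectualProperty

    ip_default = IntellectualProperty(publisher="contoso")  # protection_level defaults to IPProtectionLevel.ALL
    ip_open = IntellectualProperty(publisher="contoso", protection_level=IPProtectionLevel.NONE)

    # __eq__ compares both publisher and protection_level; != falls back to its negation.
    assert ip_default != ip_open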
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/workspace_asset_reference.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/workspace_asset_reference.py
new file mode 100644
index 00000000..1e7d1ba2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_assets/workspace_asset_reference.py
@@ -0,0 +1,87 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import os
+from pathlib import Path
+from typing import Any, Dict, Optional, Union
+
+from azure.ai.ml._restclient.v2021_10_01_dataplanepreview.models import (
+    ResourceManagementAssetReferenceData,
+    ResourceManagementAssetReferenceDetails,
+)
+from azure.ai.ml._schema import WorkspaceAssetReferenceSchema
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY
+from azure.ai.ml.entities._assets.asset import Asset
+from azure.ai.ml.entities._util import load_from_dict
+
+
+class WorkspaceAssetReference(Asset):
+    """Workspace Model Reference.
+
+    This is for SDK internal use only and might be deprecated in the future.
+
+    :param name: Model name.
+    :type name: str
+    :param version: Model version.
+    :type version: str
+    :param asset_id: Model asset id.
+    :type asset_id: str
+    :param kwargs: A dictionary of additional configuration parameters.
+    :type kwargs: dict
+    """
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        version: Optional[str] = None,
+        asset_id: Optional[str] = None,
+        properties: Optional[Dict] = None,
+        **kwargs: Any,
+    ):
+        super().__init__(
+            name=name,
+            version=version,
+            properties=properties,
+            **kwargs,
+        )
+        self.asset_id = asset_id
+
+    @classmethod
+    def _load(
+        cls: Any,
+        data: Optional[dict] = None,
+        yaml_path: Optional[Union[os.PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "WorkspaceAssetReference":
+        data = data or {}
+        params_override = params_override or []
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        res: WorkspaceAssetReference = load_from_dict(WorkspaceAssetReferenceSchema, data, context, **kwargs)
+        return res
+
+    def _to_rest_object(self) -> ResourceManagementAssetReferenceData:
+        resource_management_details = ResourceManagementAssetReferenceDetails(
+            destination_name=self.name,
+            destination_version=self.version,
+            source_asset_id=self.asset_id,
+        )
+        resource_management = ResourceManagementAssetReferenceData(properties=resource_management_details)
+        return resource_management
+
+    @classmethod
+    def _from_rest_object(cls, resource_object: ResourceManagementAssetReferenceData) -> "WorkspaceAssetReference":
+        resource_management = WorkspaceAssetReference(
+            name=resource_object.properties.destination_name,
+            version=resource_object.properties.destination_version,
+            asset_id=resource_object.properties.source_asset_id,
+        )
+
+        return resource_management
+
+    def _to_dict(self) -> Dict:
+        return dict(WorkspaceAssetReferenceSchema(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self))