about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace
diff options
context:
space:
mode:
author    S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit    4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree      ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace
parent    cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-master.tar.gz
two version of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace')
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/__init__.py5
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/__init__.py5
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/_constants.py5
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/capability_host.py187
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/hub.py220
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/project.py89
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/compute_runtime.py41
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/__init__.py5
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/connection_subtypes.py748
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/one_lake_artifacts.py25
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/workspace_connection.py677
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/customer_managed_key.py48
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/diagnose.py214
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/feature_store_settings.py61
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/network_acls.py90
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/networking.py348
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/private_endpoint.py53
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/serverless_compute.py52
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/workspace.py491
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/workspace_keys.py100
20 files changed, 3464 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/__init__.py
new file mode 100644
index 00000000..fdf8caba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/__init__.py
@@ -0,0 +1,5 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/__init__.py
new file mode 100644
index 00000000..fdf8caba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/__init__.py
@@ -0,0 +1,5 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/_constants.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/_constants.py
new file mode 100644
index 00000000..1e75a1c2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/_constants.py
@@ -0,0 +1,5 @@
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

# "Kind" discriminator string for Azure AI Services endpoints/connections.
# NOTE(review): exact consumers are outside this file — presumably matched
# against the service-reported kind; confirm against callers.
ENDPOINT_AI_SERVICE_KIND = "AIServices"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/capability_host.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/capability_host.py
new file mode 100644
index 00000000..f86ea8ed
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/capability_host.py
@@ -0,0 +1,187 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import os
+from os import PathLike
+from typing import (
+    List,
+    Optional,
+    Union,
+    IO,
+    Any,
+    AnyStr,
+    Dict,
+)
+from pathlib import Path
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.entities._resource import Resource
+from azure.ai.ml.constants._workspace import CapabilityHostKind
+from azure.ai.ml.constants._common import (
+    BASE_PATH_CONTEXT_KEY,
+    PARAMS_OVERRIDE_KEY,
+)
+
+from azure.ai.ml._schema.workspace.ai_workspaces.capability_host import (
+    CapabilityHostSchema,
+)
+from azure.ai.ml._utils.utils import dump_yaml_to_file
+from azure.ai.ml.entities._util import load_from_dict
+from azure.ai.ml._restclient.v2024_10_01_preview.models._models_py3 import (
+    CapabilityHost as RestCapabilityHost,
+)
+from azure.ai.ml._restclient.v2024_10_01_preview.models._models_py3 import (
+    CapabilityHostProperties as RestCapabilityHostProperties,
+)
+
+
@experimental
class CapabilityHost(Resource):
    """A capability host attached to a Hub or Project workspace.

    Capability host management is controlled by MLClient's capabilityhosts
    operations.

    :param name: The name of the capability host.
    :type name: str
    :param description: The description of the capability host.
    :type description: Optional[str]
    :param vector_store_connections: A list of vector store (AI Search) connections.
    :type vector_store_connections: Optional[List[str]]
    :param ai_services_connections: A list of OpenAI service connections.
    :type ai_services_connections: Optional[List[str]]
    :param storage_connections: A list of storage connections. Default storage connection value is
        projectname/workspaceblobstore for project workspace.
    :type storage_connections: Optional[List[str]]
    :param capability_host_kind: The kind of capability host, either as a string or CapabilityHostKind enum.
        Default is AGENTS.
    :type capability_host_kind: Union[str, CapabilityHostKind]
    :param kwargs: Additional keyword arguments.
    :type kwargs: Any

    .. admonition:: Example:

        .. literalinclude:: ../samples/ml_samples_capability_host.py
            :start-after: [START capability_host_object_create]
            :end-before: [END capability_host_object_create]
            :language: python
            :dedent: 8
            :caption: Create a CapabilityHost object.
    """

    def __init__(
        self,
        *,
        name: str,
        description: Optional[str] = None,
        vector_store_connections: Optional[List[str]] = None,
        ai_services_connections: Optional[List[str]] = None,
        storage_connections: Optional[List[str]] = None,
        capability_host_kind: Union[str, CapabilityHostKind] = CapabilityHostKind.AGENTS,
        **kwargs: Any,
    ):
        super().__init__(name=name, description=description, **kwargs)
        # Connection lists are stored verbatim; validation happens service-side.
        self.vector_store_connections = vector_store_connections
        self.ai_services_connections = ai_services_connections
        self.storage_connections = storage_connections
        self.capability_host_kind = capability_host_kind

    def dump(
        self,
        dest: Optional[Union[str, PathLike, IO[AnyStr]]],
        **kwargs: Any,
    ) -> None:
        """Dump the CapabilityHost content into a file in yaml format.

        :param dest: The destination to receive this CapabilityHost's content.
            Must be either a path to a local file, or an already-open file stream.
            If dest is a file path, a new file will be created,
            and an exception is raised if the file exists.
            If dest is an open file, the file will be written to directly,
            and an exception will be raised if the file is not writable.
        :type dest: Union[PathLike, str, IO[AnyStr]]
        """
        target_path = kwargs.pop("path", None)
        serialized = self._to_dict()
        dump_yaml_to_file(dest, serialized, default_flow_style=False, path=target_path, **kwargs)

    def _to_dict(self) -> Dict:
        """Serialize this object through its marshmallow schema.

        :return: Dictionary representation of the object.
        :rtype: Dict
        """
        schema = CapabilityHostSchema(context={BASE_PATH_CONTEXT_KEY: "./"})
        return schema.dump(self)

    @classmethod
    def _load(
        cls,
        data: Optional[dict] = None,
        yaml_path: Optional[Union[os.PathLike, str]] = None,
        params_override: Optional[list] = None,
        **kwargs: Any,
    ) -> "CapabilityHost":
        """Load a CapabilityHost object from yaml data.

        :param data: Data dictionary, defaults to None.
        :type data: Dict
        :param yaml_path: YAML path, defaults to None. Used to resolve relative
            paths in the loaded data.
        :type yaml_path: Union[PathLike, str]
        :param params_override: Fields to overwrite on top of the yaml file.
            Format is [{"field1": "value1"}, {"field2": "value2"}], defaults to None.
        :type params_override: List[Dict]
        :return: Loaded CapabilityHost object.
        :rtype: ~azure.ai.ml.entities._workspace._ai_workspaces.capability_host.CapabilityHost
        """
        # Base path for relative references defaults to cwd when no yaml path is given.
        base_path = Path(yaml_path).parent if yaml_path else Path("./")
        context = {
            BASE_PATH_CONTEXT_KEY: base_path,
            PARAMS_OVERRIDE_KEY: params_override or [],
        }
        loaded = load_from_dict(CapabilityHostSchema, data or {}, context, **kwargs)
        return cls(**loaded)

    @classmethod
    def _from_rest_object(cls, rest_obj: RestCapabilityHost) -> "CapabilityHost":
        """Convert a REST object into a CapabilityHost object.

        :param rest_obj: The REST object to convert.
        :type rest_obj: ~azure.ai.ml._restclient.v2024_10_01_preview.models._models_py3.CapabilityHost
        :return: CapabilityHost object.
        :rtype: ~azure.ai.ml.entities._workspace._ai_workspaces.capability_host.CapabilityHost
        """
        # The nested 'properties' sub-object may be absent; fall back field-by-field.
        props = rest_obj.properties
        return cls(
            name=str(rest_obj.name),
            description=props.description if props else None,
            ai_services_connections=props.ai_services_connections if props else None,
            storage_connections=props.storage_connections if props else None,
            vector_store_connections=props.vector_store_connections if props else None,
            capability_host_kind=(props.capability_host_kind if props else CapabilityHostKind.AGENTS),
        )

    def _to_rest_object(self) -> RestCapabilityHost:
        """Convert the CapabilityHost instance to a RestCapabilityHost object.

        :return: A RestCapabilityHost object representing the capability host for a Hub or Project workspace.
        :rtype: azure.ai.ml._restclient.v2024_10_01_preview.models._models_py3.CapabilityHost
        """
        return RestCapabilityHost(
            properties=RestCapabilityHostProperties(
                ai_services_connections=self.ai_services_connections,
                storage_connections=self.storage_connections,
                vector_store_connections=self.vector_store_connections,
                description=self.description,
                capability_host_kind=self.capability_host_kind,
            )
        )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/hub.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/hub.py
new file mode 100644
index 00000000..4caac057
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/hub.py
@@ -0,0 +1,220 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import Any, Dict, List, Optional
+
+from azure.ai.ml._restclient.v2024_10_01_preview.models import Workspace as RestWorkspace
+from azure.ai.ml._restclient.v2024_10_01_preview.models import WorkspaceHubConfig as RestWorkspaceHubConfig
+from azure.ai.ml._schema.workspace import HubSchema
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._common import WorkspaceKind
+from azure.ai.ml.entities._credentials import IdentityConfiguration
+from azure.ai.ml.entities._workspace.customer_managed_key import CustomerManagedKey
+from azure.ai.ml.entities._workspace.network_acls import NetworkAcls
+from azure.ai.ml.entities._workspace.networking import ManagedNetwork
+from azure.ai.ml.entities._workspace.workspace import Workspace
+
+
@experimental
class Hub(Workspace):
    """A Hub is a special type of workspace that acts as a parent and resource container for lightweight child
    workspaces called projects. Resources like the hub's storage account, key vault,
    and container registry are shared by all child projects.

    As a type of workspace, hub management is controlled by an MLClient's workspace operations.

    :param name: Name of the hub.
    :type name: str
    :param description: Description of the hub.
    :type description: str
    :param tags: Tags of the hub.
    :type tags: dict
    :param display_name: Display name for the hub. This is non-unique within the resource group.
    :type display_name: str
    :param location: The location to create the hub in.
        If not specified, the same location as the resource group will be used.
    :type location: str
    :param resource_group: Name of resource group to create the hub in.
    :type resource_group: str
    :param managed_network: Hub's Managed Network configuration.
    :type managed_network: ~azure.ai.ml.entities.ManagedNetwork
    :param storage_account: The resource ID of an existing storage account to use instead of creating a new one.
    :type storage_account: str
    :param key_vault: The resource ID of an existing key vault to use instead of creating a new one.
    :type key_vault: str
    :param container_registry: The resource ID of an existing container registry
        to use instead of creating a new one.
    :type container_registry: str
    :param customer_managed_key: Key vault details for encrypting data with customer-managed keys.
        If not specified, Microsoft-managed keys will be used by default.
    :type customer_managed_key: ~azure.ai.ml.entities.CustomerManagedKey
    :param image_build_compute: The name of the compute target to use for building environment
        Docker images when the container registry is behind a VNet.
    :type image_build_compute: str
    :param public_network_access: Whether to allow public endpoint connectivity
        when a workspace is private link enabled.
    :type public_network_access: str
    :param network_acls: The network access control list (ACL) settings of the workspace.
    :type network_acls: ~azure.ai.ml.entities.NetworkAcls
    :param identity: The hub's Managed Identity (user assigned, or system assigned).
    :type identity: ~azure.ai.ml.entities.IdentityConfiguration
    :param primary_user_assigned_identity: The hub's primary user assigned identity.
    :type primary_user_assigned_identity: str
    :param enable_data_isolation: A flag to determine if workspace has data isolation enabled.
        The flag can only be set at the creation phase, it can't be updated.
    :type enable_data_isolation: bool
    :param default_resource_group: The resource group that will be used by projects
        created under this hub if no resource group is specified.
    :type default_resource_group: str
    :param kwargs: A dictionary of additional configuration parameters.
    :type kwargs: dict

    .. literalinclude:: ../samples/ml_samples_workspace.py
            :start-after: [START workspace_hub]
            :end-before: [END workspace_hub]
            :language: python
            :dedent: 8
            :caption: Creating a Hub object.
    """

    # The field 'additional_workspace_storage_accounts' exists in the API but is currently unused.

    def __init__(
        self,
        *,
        name: str,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        display_name: Optional[str] = None,
        location: Optional[str] = None,
        resource_group: Optional[str] = None,
        managed_network: Optional[ManagedNetwork] = None,
        storage_account: Optional[str] = None,
        key_vault: Optional[str] = None,
        container_registry: Optional[str] = None,
        customer_managed_key: Optional[CustomerManagedKey] = None,
        public_network_access: Optional[str] = None,
        network_acls: Optional[NetworkAcls] = None,
        identity: Optional[IdentityConfiguration] = None,
        primary_user_assigned_identity: Optional[str] = None,
        enable_data_isolation: bool = False,
        default_resource_group: Optional[str] = None,
        associated_workspaces: Optional[List[str]] = None,  # hidden input for rest->client conversions.
        **kwargs: Any,
    ):
        self._workspace_id = kwargs.pop("workspace_id", "")
        # Ensure user can't overwrite/double input kind.
        kwargs.pop("kind", None)
        super().__init__(
            name=name,
            description=description,
            tags=tags,
            kind=WorkspaceKind.HUB,
            display_name=display_name,
            location=location,
            storage_account=storage_account,
            key_vault=key_vault,
            container_registry=container_registry,
            resource_group=resource_group,
            customer_managed_key=customer_managed_key,
            public_network_access=public_network_access,
            network_acls=network_acls,
            identity=identity,
            primary_user_assigned_identity=primary_user_assigned_identity,
            managed_network=managed_network,
            enable_data_isolation=enable_data_isolation,
            **kwargs,
        )
        self._default_resource_group = default_resource_group
        self._associated_workspaces = associated_workspaces

    @classmethod
    def _get_schema_class(cls):
        # Marshmallow schema used for YAML (de)serialization of hubs.
        return HubSchema

    @classmethod
    def _from_rest_object(cls, rest_obj: RestWorkspace, v2_service_context: Optional[object] = None) -> Optional["Hub"]:
        """Build a Hub from its REST representation.

        :param rest_obj: The REST workspace object returned by the service.
        :type rest_obj: RestWorkspace
        :param v2_service_context: Optional service context forwarded to the base conversion.
        :type v2_service_context: Optional[object]
        :return: The converted Hub, or None if the input (or base conversion) is empty.
        :rtype: Optional[Hub]
        """
        if not rest_obj:
            return None

        # Base class handles the generic workspace fields; hub-specific
        # fields are layered on below.
        workspace_object = Workspace._from_rest_object(rest_obj, v2_service_context)
        if workspace_object is None:
            return None

        default_resource_group = None
        hub_config = getattr(rest_obj, "workspace_hub_config", None)
        if hub_config and isinstance(hub_config, RestWorkspaceHubConfig):
            default_resource_group = hub_config.default_workspace_resource_group

        return Hub(
            name=workspace_object.name if workspace_object.name is not None else "",
            description=workspace_object.description,
            tags=workspace_object.tags,
            display_name=workspace_object.display_name,
            location=workspace_object.location,
            resource_group=workspace_object.resource_group,
            managed_network=workspace_object.managed_network,
            customer_managed_key=workspace_object.customer_managed_key,
            public_network_access=workspace_object.public_network_access,
            network_acls=workspace_object.network_acls,
            identity=workspace_object.identity,
            primary_user_assigned_identity=workspace_object.primary_user_assigned_identity,
            storage_account=rest_obj.storage_account,
            key_vault=rest_obj.key_vault,
            container_registry=rest_obj.container_registry,
            workspace_id=rest_obj.workspace_id,
            enable_data_isolation=rest_obj.enable_data_isolation,
            default_resource_group=default_resource_group,
            associated_workspaces=rest_obj.associated_workspaces if rest_obj.associated_workspaces else [],
            id=rest_obj.id,
        )

    # Helper function to deal with sub-rest object conversion.
    def _hub_values_to_rest_object(self) -> RestWorkspaceHubConfig:
        """Convert hub-specific fields into the REST hub-config sub-object.

        :return: The hub configuration payload for the REST workspace.
        :rtype: RestWorkspaceHubConfig
        """
        # 'additional_workspace_storage_accounts' exists in the API but is not
        # surfaced by this client; forward it only if something has set it.
        additional_workspace_storage_accounts = getattr(self, "additional_workspace_storage_accounts", None)
        # Bug fix: this method previously assigned None unconditionally in both
        # hasattr branches, so a user-configured default_resource_group was
        # silently dropped and never sent to the service.
        default_resource_group = getattr(self, "default_resource_group", None)
        return RestWorkspaceHubConfig(
            additional_workspace_storage_accounts=additional_workspace_storage_accounts,
            default_workspace_resource_group=default_resource_group,
        )

    def _to_rest_object(self) -> RestWorkspace:
        """Convert this Hub to its REST representation (base workspace + hub config)."""
        rest_workspace = super()._to_rest_object()
        rest_workspace.workspace_hub_config = self._hub_values_to_rest_object()
        return rest_workspace

    @property
    def default_resource_group(self) -> Optional[str]:
        """The default resource group for this hub and its children.

        :return: The resource group.
        :rtype: Optional[str]
        """
        return self._default_resource_group

    @default_resource_group.setter
    def default_resource_group(self, value: str):
        """Set the default resource group for child projects of this hub.

        :param value: The new resource group. Empty/None values are ignored.
        :type value: str
        """
        if not value:
            return
        self._default_resource_group = value

    # No setter, read-only
    @property
    def associated_workspaces(self) -> Optional[List[str]]:
        """The workspaces associated with the hub.

        :return: The associated workspace resource IDs.
        :rtype: Optional[List[str]]
        """
        return self._associated_workspaces
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/project.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/project.py
new file mode 100644
index 00000000..ffad4922
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/_ai_workspaces/project.py
@@ -0,0 +1,89 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
+from typing import Any, Dict, Optional
+
+from azure.ai.ml._schema.workspace import ProjectSchema
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._common import WorkspaceKind
+from azure.ai.ml.entities._workspace.workspace import Workspace
+
+
# Effectively a lightweight wrapper around a v2 SDK workspace
@experimental
class Project(Workspace):
    """A Project is a lightweight object for orchestrating AI applications, and is parented by a hub.
    Unlike a standard workspace, a project does not have a variety of sub-resources directly associated with it.
    Instead, its parent hub manages these resources, which are then used by the project and its siblings.

    As a type of workspace, project management is controlled by an MLClient's workspace operations.

    :param name: The name of the project.
    :type name: str
    :param hub_id: The hub parent of the project, as a resource ID.
    :type hub_id: str
    :param description: The description of the project.
    :type description: Optional[str]
    :param tags: Tags associated with the project.
    :type tags: Optional[Dict[str, str]]
    :param display_name: The display name of the project.
    :type display_name: Optional[str]
    :param location: The location of the project. Must match that of the parent hub
        and is automatically assigned to match the parent hub's location during creation.
    :type location: Optional[str]
    :param resource_group: The project's resource group name.
    :type resource_group: Optional[str]
    """

    def __init__(
        self,
        *,
        name: str,
        hub_id: str,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        display_name: Optional[str] = None,
        location: Optional[str] = None,
        resource_group: Optional[str] = None,
        **kwargs,
    ) -> None:
        # Drop any caller-supplied kind; projects are always WorkspaceKind.PROJECT.
        kwargs.pop("kind", None)
        super().__init__(
            name=name,
            kind=WorkspaceKind.PROJECT,
            hub_id=hub_id,
            description=description,
            tags=tags,
            display_name=display_name,
            location=location,
            resource_group=resource_group,
            **kwargs,
        )

    @classmethod
    def _get_schema_class(cls) -> Any:
        # Marshmallow schema used for YAML (de)serialization of projects.
        return ProjectSchema

    @property
    def hub_id(self) -> str:
        """The UID of the hub parent of the project.

        :return: Resource ID of the parent hub, or "" when unset.
        :rtype: str
        """
        return self._hub_id or ""

    @hub_id.setter
    def hub_id(self, value: str):
        """Set the parent hub id of the project.

        :param value: The hub id to assign to the project.
            Note: cannot be reassigned after creation; empty values are ignored.
        :type value: str
        """
        if value:
            self._hub_id = value
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/compute_runtime.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/compute_runtime.py
new file mode 100644
index 00000000..bc7ee127
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/compute_runtime.py
@@ -0,0 +1,41 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import Optional
+
+from azure.ai.ml._restclient.v2023_06_01_preview.models import ComputeRuntimeDto as RestComputeRuntimeDto
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+
+
class ComputeRuntime(RestTranslatableMixin):
    """Spark compute runtime configuration.

    :keyword spark_runtime_version: Spark runtime version.
    :paramtype spark_runtime_version: Optional[str]

    .. admonition:: Example:

        .. literalinclude:: ../samples/ml_samples_compute.py
            :start-after: [START compute_runtime]
            :end-before: [END compute_runtime]
            :language: python
            :dedent: 8
            :caption: Creating a ComputeRuntime object.
    """

    def __init__(self, *, spark_runtime_version: Optional[str] = None) -> None:
        # Stored verbatim; the service validates the version string.
        self.spark_runtime_version = spark_runtime_version

    def _to_rest_object(self) -> RestComputeRuntimeDto:
        """Build the REST DTO carrying this runtime's version."""
        return RestComputeRuntimeDto(spark_runtime_version=self.spark_runtime_version)

    @classmethod
    def _from_rest_object(cls, obj: RestComputeRuntimeDto) -> Optional["ComputeRuntime"]:
        """Convert a REST DTO into a ComputeRuntime; falsy input yields None."""
        return ComputeRuntime(spark_runtime_version=obj.spark_runtime_version) if obj else None
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/__init__.py
new file mode 100644
index 00000000..fdf8caba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/__init__.py
@@ -0,0 +1,5 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/connection_subtypes.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/connection_subtypes.py
new file mode 100644
index 00000000..d97e513e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/connection_subtypes.py
@@ -0,0 +1,748 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import re
+from typing import Any, Dict, List, Optional, Type, Union
+
+from azure.ai.ml._restclient.v2024_04_01_preview.models import ConnectionCategory
+from azure.ai.ml._schema.workspace.connections.connection_subtypes import (
+    APIKeyConnectionSchema,
+    AzureAISearchConnectionSchema,
+    AzureAIServicesConnectionSchema,
+    AzureBlobStoreConnectionSchema,
+    AzureContentSafetyConnectionSchema,
+    AzureOpenAIConnectionSchema,
+    AzureSpeechServicesConnectionSchema,
+    MicrosoftOneLakeConnectionSchema,
+    OpenAIConnectionSchema,
+    SerpConnectionSchema,
+    ServerlessConnectionSchema,
+)
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml._utils.utils import camel_to_snake
+from azure.ai.ml.constants._common import (
+    CONNECTION_ACCOUNT_NAME_KEY,
+    CONNECTION_API_TYPE_KEY,
+    CONNECTION_API_VERSION_KEY,
+    CONNECTION_CONTAINER_NAME_KEY,
+    CONNECTION_KIND_KEY,
+    CONNECTION_RESOURCE_ID_KEY,
+    CognitiveServiceKinds,
+    ConnectionTypes,
+)
+from azure.ai.ml.entities._credentials import AadCredentialConfiguration, ApiKeyConfiguration
+
+from .one_lake_artifacts import OneLakeConnectionArtifact
+from .workspace_connection import WorkspaceConnection
+
+# Dev notes: Any new classes require modifying the elif chains in the following functions in the
+# WorkspaceConnection parent class: _from_rest_object, _get_entity_class_from_type, _get_schema_class_from_type
+
+
+class AzureBlobStoreConnection(WorkspaceConnection):
+    """A connection to an Azure Blob Store.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param url: The URL or ARM resource ID of the external resource.
+    :type url: str
+    :param container_name: The name of the container.
+    :type container_name: str
+    :param account_name: The name of the account.
+    :type account_name: str
+    :param credentials: The credentials for authenticating to the blob store. This type of
+        connection accepts 3 types of credentials: account key and SAS token credentials,
+        or NoneCredentialConfiguration for credential-less connections.
+    :type credentials: Union[
+        ~azure.ai.ml.entities.AccountKeyConfiguration,
+        ~azure.ai.ml.entities.SasTokenConfiguration,
+        ~azure.ai.ml.entities.AadCredentialConfiguration,
+        ]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        url: str,
+        container_name: str,
+        account_name: str,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+        # Blob store connections returned from the API generally have no credentials, but we still don't want
+        # to silently run over user inputted connections if they want to play with them locally, so double-check
+        # kwargs for them.
+        if metadata is None:
+            metadata = {}
+        # Container and account names are persisted in the metadata dict under
+        # well-known keys; the properties below read/write these same entries.
+        metadata[CONNECTION_CONTAINER_NAME_KEY] = container_name
+        metadata[CONNECTION_ACCOUNT_NAME_KEY] = account_name
+
+        super().__init__(
+            url=url,
+            type=camel_to_snake(ConnectionCategory.AZURE_BLOB),
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_required_metadata_fields(cls) -> List[str]:
+        # Metadata keys that must be present for this connection type to validate.
+        return [CONNECTION_CONTAINER_NAME_KEY, CONNECTION_ACCOUNT_NAME_KEY]
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        # Marshmallow schema used for YAML load/dump of this subtype.
+        return AzureBlobStoreConnectionSchema
+
+    @property
+    def container_name(self) -> Optional[str]:
+        """The name of the connection's container.
+
+        :return: The name of the container.
+        :rtype: Optional[str]
+        """
+        if self.metadata is not None:
+            return self.metadata.get(CONNECTION_CONTAINER_NAME_KEY, None)
+        return None
+
+    @container_name.setter
+    def container_name(self, value: str) -> None:
+        """Set the container name of the connection.
+
+        :param value: The new container name to set.
+        :type value: str
+        """
+        if self.metadata is None:
+            self.metadata = {}
+        self.metadata[CONNECTION_CONTAINER_NAME_KEY] = value
+
+    @property
+    def account_name(self) -> Optional[str]:
+        """The name of the connection's account
+
+        :return: The name of the account.
+        :rtype: Optional[str]
+        """
+        if self.metadata is not None:
+            return self.metadata.get(CONNECTION_ACCOUNT_NAME_KEY, None)
+        return None
+
+    @account_name.setter
+    def account_name(self, value: str) -> None:
+        """Set the account name of the connection.
+
+        :param value: The new account name to set.
+        :type value: str
+        """
+        if self.metadata is None:
+            self.metadata = {}
+        self.metadata[CONNECTION_ACCOUNT_NAME_KEY] = value
+
+
+# Dev note: One lake connections are unfortunately unique in that it's extremely
+# difficult for customers to find out what the target for their system ought to be.
+# Due to this, we construct the target internally by composing more inputs
+# that are more user-accessible.
+class MicrosoftOneLakeConnection(WorkspaceConnection):
+    """A connection to a Microsoft One Lake. Connections of this type
+    are further specified by their artifact class type, although
+    the number of artifact classes is currently limited.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param endpoint: The endpoint of the connection.
+    :type endpoint: str
+    :param artifact: The artifact class used to further specify the connection.
+    :type artifact: Optional[~azure.ai.ml.entities.OneLakeArtifact]
+    :param one_lake_workspace_name: The name, not ID, of the workspace where the One Lake
+        resource lives.
+    :type one_lake_workspace_name: Optional[str]
+    :param credentials: The credentials for authenticating to the blob store. This type of
+        connection accepts 3 types of credentials: account key and SAS token credentials,
+        or NoneCredentialConfiguration for credential-less connections.
+    :type credentials: Union[
+        ~azure.ai.ml.entities.AccessKeyConfiguration,
+        ~azure.ai.ml.entities.SasTokenConfiguration,
+        ~azure.ai.ml.entities.AadCredentialConfiguration,
+        ]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        endpoint: str,
+        artifact: Optional[OneLakeConnectionArtifact] = None,
+        one_lake_workspace_name: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+
+        # Allow target to be inputted for from-rest conversions where we don't
+        # need to worry about data-availability nonsense.
+        target = kwargs.pop("target", None)
+        if target is None:
+            # When no target is supplied (the normal user path), all three
+            # inputs are required to compose the target URL below.
+            if artifact is None:
+                raise ValueError("If target is unset, then artifact must be set")
+            if endpoint is None:
+                raise ValueError("If target is unset, then endpoint must be set")
+            if one_lake_workspace_name is None:
+                raise ValueError("If target is unset, then one_lake_workspace_name must be set")
+            target = MicrosoftOneLakeConnection._construct_target(endpoint, one_lake_workspace_name, artifact)
+        super().__init__(
+            target=target,
+            type=camel_to_snake(ConnectionCategory.AZURE_ONE_LAKE),
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return MicrosoftOneLakeConnectionSchema
+
+    # Target is constructed from user inputs, because it's apparently very difficult for users to
+    # directly access a One Lake's target URL.
+    @classmethod
+    def _construct_target(cls, endpoint: str, workspace: str, artifact: OneLakeConnectionArtifact) -> str:
+        artifact_name = artifact.name
+        # If an id is supplied, the format is different
+        # NOTE(review): this pattern looks intended to match a GUID (8-4-4-4-12 hex),
+        # but uses .{7} for the first group, omits the "-" before .{12}, and re.match
+        # is anchored only at the start — confirm against expected artifact ids.
+        if re.match(".{7}-.{4}-.{4}-.{4}.{12}", artifact_name):
+            return f"https://{endpoint}/{workspace}/{artifact_name}"
+        return f"https://{endpoint}/{workspace}/{artifact_name}.Lakehouse"
+
+
+# There are enough types of connections that their only accept an api key credential,
+# or just an api key credential or no credentials, that it merits a parent class for
+# all of them. One that's slightly more specific than the base Connection.
+# This file contains that parent class, as well as all of its children.
+# Not experimental since users should never see this,
+# No need to add an extra warning.
+class ApiOrAadConnection(WorkspaceConnection):
+    """Internal parent class for all connections that accept either an api key or
+    entra ID as credentials. Entra ID credentials are implicitly assumed if no api key is provided.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param target: The URL or ARM resource ID of the external resource.
+    :type target: str
+    :param api_key: The api key to connect to the azure endpoint.
+        If unset, tries to use the user's Entra ID as credentials instead.
+    :type api_key: Optional[str]
+    :param api_version: The api version that this connection was created for.
+    :type api_version: Optional[str]
+    :param type: The type of the connection.
+    :type type: str
+    :param allow_entra: Whether or not this connection allows initialization without
+        an API key via Aad. Defaults to True.
+    :type allow_entra: bool
+    """
+
+    def __init__(
+        self,
+        *,
+        api_key: Optional[str] = None,
+        allow_entra: bool = True,
+        type: str,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs: Any,
+    ):
+        # See if credentials directly inputted via kwargs
+        credentials: Union[AadCredentialConfiguration, ApiKeyConfiguration] = kwargs.pop(
+            "credentials", AadCredentialConfiguration()
+        )
+        # Replace anything that isn't an API credential with an AAD credential.
+        # Importantly, this replaced the None credential default from the parent YAML schema.
+        if not isinstance(credentials, ApiKeyConfiguration):
+            credentials = AadCredentialConfiguration()
+        # Further replace that if a key is provided — an explicit api_key always
+        # wins over any credentials passed through kwargs.
+        if api_key:
+            credentials = ApiKeyConfiguration(key=api_key)
+        elif not allow_entra and isinstance(credentials, AadCredentialConfiguration):
+            # If no creds are provided in any capacity when needed, complain.
+            raise ValueError("This connection type must set the api_key value.")
+
+        super().__init__(
+            type=type,
+            credentials=credentials,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @property
+    def api_key(self) -> Optional[str]:
+        """The API key of the connection.
+
+        :return: The API key of the connection.
+        :rtype: Optional[str]
+        """
+        if isinstance(self._credentials, ApiKeyConfiguration):
+            return self._credentials.key
+        return None
+
+    @api_key.setter
+    def api_key(self, value: str) -> None:
+        """Set the API key of the connection. Setting this to None will
+        cause the connection to use the user's Entra ID as credentials.
+
+        :param value: The new API key to set.
+        :type value: str
+        """
+        if value is None:
+            self._credentials = AadCredentialConfiguration()
+        else:
+            self._credentials = ApiKeyConfiguration(key=value)
+
+
+@experimental
+class AzureOpenAIConnection(ApiOrAadConnection):
+    """A Connection that is specifically designed for handling connections
+    to Azure Open AI.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param azure_endpoint: The URL or ARM resource ID of the Azure Open AI Resource.
+    :type azure_endpoint: str
+    :param api_key: The api key to connect to the azure endpoint.
+        If unset, tries to use the user's Entra ID as credentials instead.
+    :type api_key: Optional[str]
+    :param open_ai_resource_id: The fully qualified ID of the Azure Open AI resource to connect to.
+    :type open_ai_resource_id: Optional[str]
+    :param api_version: The api version that this connection was created for.
+    :type api_version: Optional[str]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        azure_endpoint: str,
+        api_key: Optional[str] = None,
+        api_version: Optional[str] = None,
+        api_type: str = "Azure",  # Required API input, hidden to allow for rare overrides
+        open_ai_resource_id: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs: Any,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+        # Sneak in resource ID as it's inputted from rest conversions as a kwarg.
+        from_rest_resource_id = kwargs.pop("resource_id", None)
+        if open_ai_resource_id is None and from_rest_resource_id is not None:
+            open_ai_resource_id = from_rest_resource_id
+
+        if metadata is None:
+            metadata = {}
+        # NOTE(review): values are stored even when None, so unset inputs become
+        # None-valued metadata entries rather than absent keys (the
+        # open_ai_resource_id setter below pops the key instead) — confirm intended.
+        metadata[CONNECTION_API_VERSION_KEY] = api_version
+        metadata[CONNECTION_API_TYPE_KEY] = api_type
+        metadata[CONNECTION_RESOURCE_ID_KEY] = open_ai_resource_id
+
+        super().__init__(
+            azure_endpoint=azure_endpoint,
+            api_key=api_key,
+            type=camel_to_snake(ConnectionCategory.AZURE_OPEN_AI),
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_required_metadata_fields(cls) -> List[str]:
+        return [CONNECTION_API_VERSION_KEY, CONNECTION_API_TYPE_KEY, CONNECTION_RESOURCE_ID_KEY]
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return AzureOpenAIConnectionSchema
+
+    @property
+    def api_version(self) -> Optional[str]:
+        """The API version of the connection.
+
+        :return: The API version of the connection.
+        :rtype: Optional[str]
+        """
+        if self.metadata is not None and CONNECTION_API_VERSION_KEY in self.metadata:
+            res: str = self.metadata[CONNECTION_API_VERSION_KEY]
+            return res
+        return None
+
+    @api_version.setter
+    def api_version(self, value: str) -> None:
+        """Set the API version of the connection.
+
+        :param value: The new api version to set.
+        :type value: str
+        """
+        if not hasattr(self, "metadata") or self.metadata is None:
+            self.metadata = {}
+        self.metadata[CONNECTION_API_VERSION_KEY] = value
+
+    @property
+    def open_ai_resource_id(self) -> Optional[str]:
+        """The fully qualified ID of the Azure Open AI resource this connects to.
+
+        :return: The fully qualified ID of the Azure Open AI resource this connects to.
+        :rtype: Optional[str]
+        """
+        if self.metadata is not None and CONNECTION_RESOURCE_ID_KEY in self.metadata:
+            res: str = self.metadata[CONNECTION_RESOURCE_ID_KEY]
+            return res
+        return None
+
+    @open_ai_resource_id.setter
+    def open_ai_resource_id(self, value: Optional[str]) -> None:
+        """Set the fully qualified ID of the Azure Open AI resource to connect to.
+
+        :param value: The new resource id to set.
+        :type value: Optional[str]
+        """
+        if not hasattr(self, "metadata") or self.metadata is None:
+            self.metadata = {}
+        # Unlike __init__, a None value removes the key entirely.
+        if value is None:
+            self.metadata.pop(CONNECTION_RESOURCE_ID_KEY, None)
+            return
+        self.metadata[CONNECTION_RESOURCE_ID_KEY] = value
+
+
+@experimental
+class AzureAIServicesConnection(ApiOrAadConnection):
+    """A Connection geared towards Azure AI services.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param endpoint: The URL or ARM resource ID of the external resource.
+    :type endpoint: str
+    :param api_key: The api key to connect to the azure endpoint.
+        If unset, tries to use the user's Entra ID as credentials instead.
+    :type api_key: Optional[str]
+    :param ai_services_resource_id: The fully qualified ID of the Azure AI service resource to connect to.
+    :type ai_services_resource_id: str
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        endpoint: str,
+        api_key: Optional[str] = None,
+        ai_services_resource_id: str,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs: Any,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+        if metadata is None:
+            metadata = {}
+        # Resource id is persisted in metadata; the property below reads the same key.
+        metadata[CONNECTION_RESOURCE_ID_KEY] = ai_services_resource_id
+        super().__init__(
+            endpoint=endpoint,
+            api_key=api_key,
+            type=ConnectionTypes.AZURE_AI_SERVICES,
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return AzureAIServicesConnectionSchema
+
+    @classmethod
+    def _get_required_metadata_fields(cls) -> List[str]:
+        return [CONNECTION_RESOURCE_ID_KEY]
+
+    @property
+    def ai_services_resource_id(self) -> Optional[str]:
+        """The resource id of the ai service being connected to.
+
+        :return: The resource id of the ai service being connected to.
+        :rtype: Optional[str]
+        """
+        if self.metadata is not None and CONNECTION_RESOURCE_ID_KEY in self.metadata:
+            res: str = self.metadata[CONNECTION_RESOURCE_ID_KEY]
+            return res
+        return None
+
+    @ai_services_resource_id.setter
+    def ai_services_resource_id(self, value: str) -> None:
+        """Set the ai service resource id of the connection.
+
+        :param value: The new ai service resource id to set.
+        :type value: str
+        """
+        if not hasattr(self, "metadata") or self.metadata is None:
+            self.metadata = {}
+        self.metadata[CONNECTION_RESOURCE_ID_KEY] = value
+
+
+class AzureAISearchConnection(ApiOrAadConnection):
+    """A Connection that is specifically designed for handling connections to
+    Azure AI Search.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param endpoint: The URL or ARM resource ID of the Azure AI Search Service
+    :type endpoint: str
+    :param api_key: The API key needed to connect to the Azure AI Search Service.
+    :type api_key: Optional[str]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        endpoint: str,
+        api_key: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs: Any,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+
+        # Parent default allow_entra=True: omitting api_key falls back to Entra ID.
+        super().__init__(
+            endpoint=endpoint,
+            api_key=api_key,
+            type=ConnectionTypes.AZURE_SEARCH,
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return AzureAISearchConnectionSchema
+
+
+class AzureContentSafetyConnection(ApiOrAadConnection):
+    """A Connection geared towards a Azure Content Safety service.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param endpoint: The URL or ARM resource ID of the external resource.
+    :type endpoint: str
+    :param api_key: The api key to connect to the azure endpoint.
+        If unset, tries to use the user's Entra ID as credentials instead.
+    :type api_key: Optional[str]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        endpoint: str,
+        api_key: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs: Any,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+
+        if metadata is None:
+            metadata = {}
+        # Record the cognitive service kind in metadata under the well-known kind key.
+        metadata[CONNECTION_KIND_KEY] = CognitiveServiceKinds.CONTENT_SAFETY
+
+        super().__init__(
+            endpoint=endpoint,
+            api_key=api_key,
+            type=ConnectionTypes.AZURE_CONTENT_SAFETY,
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return AzureContentSafetyConnectionSchema
+
+
+class AzureSpeechServicesConnection(ApiOrAadConnection):
+    """A Connection geared towards an Azure Speech service.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param endpoint: The URL or ARM resource ID of the external resource.
+    :type endpoint: str
+    :param api_key: The api key to connect to the azure endpoint.
+        If unset, tries to use the user's Entra ID as credentials instead.
+    :type api_key: Optional[str]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    # Known cognitive service kinds include "AzureOpenAI", "ContentSafety", and
+    # "Speech"; this subclass pins the "Speech" kind in metadata below.
+
+    def __init__(
+        self,
+        *,
+        endpoint: str,
+        api_key: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs: Any,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+
+        if metadata is None:
+            metadata = {}
+        metadata[CONNECTION_KIND_KEY] = CognitiveServiceKinds.SPEECH
+        super().__init__(
+            endpoint=endpoint,
+            api_key=api_key,
+            type=ConnectionTypes.AZURE_SPEECH_SERVICES,
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return AzureSpeechServicesConnectionSchema
+
+
+@experimental
+class APIKeyConnection(ApiOrAadConnection):
+    """A generic connection for any API key-based service.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param api_base: The URL to target with this connection.
+    :type api_base: str
+    :param api_key: The API key needed to connect to the api_base.
+    :type api_key: Optional[str]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        api_base: str,
+        api_key: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+        # allow_entra=False: generic API-key connections require a key; the parent
+        # raises ValueError if neither api_key nor key credentials are supplied.
+        super().__init__(
+            api_base=api_base,
+            api_key=api_key,
+            type=camel_to_snake(ConnectionCategory.API_KEY),
+            allow_entra=False,
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return APIKeyConnectionSchema
+
+
+@experimental
+class OpenAIConnection(ApiOrAadConnection):
+    """A connection geared towards direct connections to Open AI.
+    Not to be confused with the AzureOpenAIWorkspaceConnection, which is for Azure's Open AI services.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param api_key: The API key needed to connect to the Open AI.
+    :type api_key: Optional[str]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        api_key: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+        # allow_entra=False: a key is mandatory for direct Open AI connections.
+        super().__init__(
+            type=ConnectionCategory.Open_AI,
+            api_key=api_key,
+            allow_entra=False,
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return OpenAIConnectionSchema
+
+
+@experimental
+class SerpConnection(ApiOrAadConnection):
+    """A connection geared towards a Serp service (Open source search API Service)
+
+    :param name: Name of the connection.
+    :type name: str
+    :param api_key: The API key needed to connect to the Open AI.
+    :type api_key: Optional[str]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        api_key: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+        # allow_entra=False: Serp connections are API-key only.
+        super().__init__(
+            type=ConnectionCategory.SERP,
+            api_key=api_key,
+            allow_entra=False,
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return SerpConnectionSchema
+
+
+@experimental
+class ServerlessConnection(ApiOrAadConnection):
+    """A connection geared towards a MaaS endpoint (Serverless).
+
+    :param name: Name of the connection.
+    :type name: str
+    :param endpoint: The serverless endpoint.
+    :type endpoint: str
+    :param api_key: The API key needed to connect to the endpoint.
+    :type api_key: Optional[str]
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[dict[str,str]]
+    """
+
+    def __init__(
+        self,
+        *,
+        endpoint: str,
+        api_key: Optional[str] = None,
+        metadata: Optional[Dict[Any, Any]] = None,
+        **kwargs,
+    ):
+        kwargs.pop("type", None)  # make sure we never somehow use wrong type
+        # allow_entra=False: serverless endpoints are authenticated by key only.
+        super().__init__(
+            type=ConnectionCategory.SERVERLESS,
+            endpoint=endpoint,
+            api_key=api_key,
+            allow_entra=False,
+            from_child=True,
+            metadata=metadata,
+            **kwargs,
+        )
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        return ServerlessConnectionSchema
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/one_lake_artifacts.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/one_lake_artifacts.py
new file mode 100644
index 00000000..ea81602f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/one_lake_artifacts.py
@@ -0,0 +1,25 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import Any
+from azure.ai.ml._utils._experimental import experimental
+
+# Dev note: Supposedly there's going to be more artifact subclasses at some point.
+# If/when that comes to pass, we can worry about adding polymorphism to these classes.
+# For now, this is a one-off that's needed to help match the object structure that PF uses.
+
+
+# Why is this not called a "LakeHouseArtifact"?  Because despite the under-the-hood type,
+# users expect this variety to be called "OneLake".
+@experimental
+class OneLakeConnectionArtifact:
+    """Artifact class used by the Connection subclass known
+    as a MicrosoftOneLakeConnection. Supplying this class further
+    specifies the connection as a Lake House connection.
+    """
+
+    # Note: Kwargs exist just to silently absorb type from schema.
+    def __init__(self, *, name: str, **kwargs: Any):  # pylint: disable=unused-argument
+        self.name = name
+        # Fixed discriminator; "lake_house" is the only artifact subtype today.
+        self.type = "lake_house"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/workspace_connection.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/workspace_connection.py
new file mode 100644
index 00000000..ab1ee9f8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/connections/workspace_connection.py
@@ -0,0 +1,677 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+import warnings
+from os import PathLike
+from pathlib import Path
+from typing import IO, Any, AnyStr, Dict, List, Optional, Type, Union, cast
+
+
+from azure.ai.ml._restclient.v2024_04_01_preview.models import (
+    WorkspaceConnectionPropertiesV2BasicResource as RestWorkspaceConnection,
+)
+from azure.ai.ml._restclient.v2024_04_01_preview.models import (
+    ConnectionCategory,
+    NoneAuthTypeWorkspaceConnectionProperties,
+    AADAuthTypeWorkspaceConnectionProperties,
+)
+
+from azure.ai.ml._schema.workspace.connections.workspace_connection import WorkspaceConnectionSchema
+from azure.ai.ml._utils.utils import _snake_to_camel, camel_to_snake, dump_yaml_to_file
+from azure.ai.ml.constants._common import (
+    BASE_PATH_CONTEXT_KEY,
+    PARAMS_OVERRIDE_KEY,
+    ConnectionTypes,
+    CognitiveServiceKinds,
+    CONNECTION_KIND_KEY,
+    CONNECTION_RESOURCE_ID_KEY,
+)
+from azure.ai.ml.entities._credentials import (
+    AccessKeyConfiguration,
+    ApiKeyConfiguration,
+    ManagedIdentityConfiguration,
+    NoneCredentialConfiguration,
+    PatTokenConfiguration,
+    SasTokenConfiguration,
+    ServicePrincipalConfiguration,
+    UsernamePasswordConfiguration,
+    _BaseIdentityConfiguration,
+    AccountKeyConfiguration,
+    AadCredentialConfiguration,
+)
+from azure.ai.ml.entities._resource import Resource
+from azure.ai.ml.entities._system_data import SystemData
+from azure.ai.ml.entities._util import load_from_dict
+
+
+CONNECTION_CATEGORY_TO_CREDENTIAL_MAP = {
+    ConnectionCategory.AZURE_BLOB: [AccessKeyConfiguration, SasTokenConfiguration, AadCredentialConfiguration],
+    ConnectionTypes.AZURE_DATA_LAKE_GEN_2: [
+        ServicePrincipalConfiguration,
+        AadCredentialConfiguration,
+        ManagedIdentityConfiguration,
+    ],
+    ConnectionCategory.GIT: [PatTokenConfiguration, NoneCredentialConfiguration, UsernamePasswordConfiguration],
+    ConnectionCategory.PYTHON_FEED: [UsernamePasswordConfiguration, PatTokenConfiguration, NoneCredentialConfiguration],
+    ConnectionCategory.CONTAINER_REGISTRY: [ManagedIdentityConfiguration, UsernamePasswordConfiguration],
+}
+
+DATASTORE_CONNECTIONS = {
+    ConnectionCategory.AZURE_BLOB,
+    ConnectionTypes.AZURE_DATA_LAKE_GEN_2,
+    ConnectionCategory.AZURE_ONE_LAKE,
+}
+
+CONNECTION_ALTERNATE_TARGET_NAMES = ["target", "api_base", "url", "azure_endpoint", "endpoint"]
+
+
+# Dev note: The acceptable strings for the type field are all snake_cased versions of the string constants defined
+# In the rest client enum defined at _azure_machine_learning_services_enums.ConnectionCategory.
+# We avoid directly referencing it in the docs to avoid restclient references.
+class WorkspaceConnection(Resource):
+    """Azure ML connection provides a secure way to store authentication and configuration information needed
+    to connect and interact with the external resources.
+
+    Note: For connections to OpenAI, Cognitive Search, and Cognitive Services, use the respective subclasses
+    (ex: ~azure.ai.ml.entities.OpenAIConnection) instead of instantiating this class directly.
+
+    :param name: Name of the connection.
+    :type name: str
+    :param target: The URL or ARM resource ID of the external resource.
+    :type target: str
+    :param metadata: Metadata dictionary.
+    :type metadata: Optional[Dict[str, Any]]
+    :param type: The category of external resource for this connection. Possible values are: "git",
+        "python_feed", "container_registry", "feature_store", "s3", "snowflake", "azure_sql_db",
+        "azure_synapse_analytics", "azure_my_sql_db", "azure_postgres_db", "adls_gen_2", "azure_one_lake", "custom".
+    :type type: str
+    :param credentials: The credentials for authenticating to the external resource. Note that certain connection
+        types (as defined by the type input) only accept certain types of credentials.
+    :type credentials: Union[
+        ~azure.ai.ml.entities.PatTokenConfiguration,
+        ~azure.ai.ml.entities.SasTokenConfiguration,
+        ~azure.ai.ml.entities.UsernamePasswordConfiguration,
+        ~azure.ai.ml.entities.ManagedIdentityConfiguration
+        ~azure.ai.ml.entities.ServicePrincipalConfiguration,
+        ~azure.ai.ml.entities.AccessKeyConfiguration,
+        ~azure.ai.ml.entities.ApiKeyConfiguration,
+        ~azure.ai.ml.entities.NoneCredentialConfiguration
+        ~azure.ai.ml.entities.AccountKeyConfiguration,
+        ~azure.ai.ml.entities.AadCredentialConfiguration,
+        None
+        ]
+    :param is_shared: For connections in project, this controls whether or not this connection
+        is shared amongst other projects that are shared by the parent hub. Defaults to true.
+    :type is_shared: bool
+    """
+
+    def __init__(
+        self,
+        *,
+        # TODO : Check if this is okay since it shadows builtin-type type
+        type: str,  # pylint: disable=redefined-builtin
+        credentials: Union[
+            PatTokenConfiguration,
+            SasTokenConfiguration,
+            UsernamePasswordConfiguration,
+            ManagedIdentityConfiguration,
+            ServicePrincipalConfiguration,
+            AccessKeyConfiguration,
+            ApiKeyConfiguration,
+            NoneCredentialConfiguration,
+            AccountKeyConfiguration,
+            AadCredentialConfiguration,
+        ],
+        is_shared: bool = True,
+        metadata: Optional[Dict[str, Any]] = None,
+        **kwargs: Any,
+    ):
+
+        # Dev note: This initializer has an undocumented kwarg "from_child" to determine if this initialization
+        # is from a child class.
+        # This kwarg is required to allow instantiation of types that are associated with subtypes without a
+        # warning printout.
+        # The additional undocumented kwarg "strict_typing" turns the warning into a value error.
+        from_child = kwargs.pop("from_child", False)
+        strict_typing = kwargs.pop("strict_typing", False)
+        correct_class = WorkspaceConnection._get_entity_class_from_type(type)
+        if not from_child and correct_class != WorkspaceConnection:
+            if strict_typing:
+                raise ValueError(
+                    f"Cannot instantiate a base Connection with a type of {type}. "
+                    f"Please use the appropriate subclass {correct_class.__name__} instead."
+                )
+            warnings.warn(
+                f"The connection of {type} has additional fields and should not be instantiated directly "
+                f"from the Connection class. Please use its subclass {correct_class.__name__} instead.",
+            )
+        # This disgusting code allows for a variety of inputs names to technically all
+        # act like the target field, while still maintaining the aggregate field as required.
+        target = None
+        for target_name in CONNECTION_ALTERNATE_TARGET_NAMES:
+            target = kwargs.pop(target_name, target)
+        if target is None and type not in {ConnectionCategory.SERP, ConnectionCategory.Open_AI}:
+            raise ValueError("target is a required field for Connection.")
+
+        tags = kwargs.pop("tags", None)
+        if tags is not None:
+            if metadata is not None:
+                # Merge metadata into tags to make sure metadata values are preserved in case of conflicts.
+                tags.update(metadata)
+                metadata = tags
+                warnings.warn(
+                    "Tags are a deprecated field for connections, use metadata instead. Since both "
+                    + "metadata and tags are assigned, metadata values will take precedence in the event of conflicts."
+                )
+            else:
+                metadata = tags
+                warnings.warn("Tags are a deprecated field for connections, use metadata instead.")
+
+        super().__init__(**kwargs)
+
+        self.type = type
+        self._target = target
+        self._credentials = credentials
+        self._is_shared = is_shared
+        self._metadata = metadata
+        self._validate_cred_for_conn_cat()
+
+    def _validate_cred_for_conn_cat(self) -> None:
+        """Given a connection type, ensure that the given credentials are valid for that connection type.
+        Does not validate the actual data of the inputted credential, just that they are of the right class
+        type.
+
+        """
+        # Convert none credentials to AAD credentials for datastore connection types.
+        # The backend stores datastore aad creds as none, unlike other connection types with aad,
+        # which actually list them as aad. This IS distinct from regular none credentials, or so I've been told,
+        # so I will endeavor to smooth over that inconsistency here.
+        converted_type = _snake_to_camel(self.type).lower()
+        if self._credentials == NoneCredentialConfiguration() and any(
+            converted_type == _snake_to_camel(item).lower() for item in DATASTORE_CONNECTIONS
+        ):
+            self._credentials = AadCredentialConfiguration()
+
+        if self.type in CONNECTION_CATEGORY_TO_CREDENTIAL_MAP:
+            allowed_credentials = CONNECTION_CATEGORY_TO_CREDENTIAL_MAP[self.type]
+            if self.credentials is None and NoneCredentialConfiguration not in allowed_credentials:
+                raise ValueError(
+                    f"Cannot instantiate a Connection with a type of {self.type} and no credentials."
+                    f"Please supply credentials from one of the following types: {allowed_credentials}."
+                )
+            cred_type = type(self.credentials)
+            if cred_type not in allowed_credentials:
+                raise ValueError(
+                    f"Cannot instantiate a Connection with a type of {self.type} and credentials of type"
+                    f" {cred_type}. Please supply credentials from one of the following types: {allowed_credentials}."
+                )
+        # For unknown types, just let the user do whatever they want.
+
+    @property
+    def type(self) -> Optional[str]:
+        """Type of the connection, supported are 'git', 'python_feed' and 'container_registry'.
+
+        :return: Type of the job.
+        :rtype: str
+        """
+        return self._type
+
+    @type.setter
+    def type(self, value: str) -> None:
+        """Set the type of the connection, supported are 'git', 'python_feed' and 'container_registry'.
+
+        :param value: value for the type of connection.
+        :type: str
+        """
+        if not value:
+            return
+        self._type: Optional[str] = camel_to_snake(value)
+
+    @property
+    def target(self) -> Optional[str]:
+        """Target url for the connection.
+
+        :return: Target of the connection.
+        :rtype: Optional[str]
+        """
+        return self._target
+
+    @property
+    def endpoint(self) -> Optional[str]:
+        """Alternate name for the target of the connection,
+        which is used by some connection subclasses.
+
+        :return: The target of the connection.
+        :rtype: str
+        """
+        return self.target
+
+    @property
+    def azure_endpoint(self) -> Optional[str]:
+        """Alternate name for the target of the connection,
+        which is used by some connection subclasses.
+
+        :return: The target of the connection.
+        :rtype: str
+        """
+        return self.target
+
+    @property
+    def url(self) -> Optional[str]:
+        """Alternate name for the target of the connection,
+        which is used by some connection subclasses.
+
+        :return: The target of the connection.
+        :rtype: str
+        """
+        return self.target
+
+    @property
+    def api_base(self) -> Optional[str]:
+        """Alternate name for the target of the connection,
+        which is used by some connection subclasses.
+
+        :return: The target of the connection.
+        :rtype: str
+        """
+        return self.target
+
+    @property
+    def credentials(
+        self,
+    ) -> Union[
+        PatTokenConfiguration,
+        SasTokenConfiguration,
+        UsernamePasswordConfiguration,
+        ManagedIdentityConfiguration,
+        ServicePrincipalConfiguration,
+        AccessKeyConfiguration,
+        ApiKeyConfiguration,
+        NoneCredentialConfiguration,
+        AccountKeyConfiguration,
+        AadCredentialConfiguration,
+    ]:
+        """Credentials for connection.
+
+        :return: Credentials for connection.
+        :rtype: Union[
+            ~azure.ai.ml.entities.PatTokenConfiguration,
+            ~azure.ai.ml.entities.SasTokenConfiguration,
+            ~azure.ai.ml.entities.UsernamePasswordConfiguration,
+            ~azure.ai.ml.entities.ManagedIdentityConfiguration
+            ~azure.ai.ml.entities.ServicePrincipalConfiguration,
+            ~azure.ai.ml.entities.AccessKeyConfiguration,
+            ~azure.ai.ml.entities.ApiKeyConfiguration
+            ~azure.ai.ml.entities.NoneCredentialConfiguration,
+            ~azure.ai.ml.entities.AccountKeyConfiguration,
+            ~azure.ai.ml.entities.AadCredentialConfiguration,
+            ]
+        """
+        return self._credentials
+
+    @property
+    def metadata(self) -> Optional[Dict[str, Any]]:
+        """The connection's metadata dictionary.
+        :return: This connection's metadata.
+        :rtype: Optional[Dict[str, Any]]
+        """
+        return self._metadata if self._metadata is not None else {}
+
+    @metadata.setter
+    def metadata(self, value: Optional[Dict[str, Any]]) -> None:
+        """Set the metadata for the connection. Be warned that setting this will override
+        ALL metadata values, including those implicitly set by certain connection types to manage their
+        extra data. Usually, you should probably access the metadata dictionary, then add or remove values
+        individually as needed.
+        :param value: The new metadata for connection.
+            This completely overwrites the existing metadata dictionary.
+        :type value: Optional[Dict[str, Any]]
+        """
+        if not value:
+            return
+        self._metadata = value
+
+    @property
+    def tags(self) -> Optional[Dict[str, Any]]:
+        """Deprecated. Use metadata instead.
+        :return: This connection's metadata.
+        :rtype: Optional[Dict[str, Any]]
+        """
+        return self._metadata if self._metadata is not None else {}
+
+    @tags.setter
+    def tags(self, value: Optional[Dict[str, Any]]) -> None:
+        """Deprecated use metadata instead
+        :param value: The new metadata for connection.
+            This completely overwrites the existing metadata dictionary.
+        :type value: Optional[Dict[str, Any]]
+        """
+        if not value:
+            return
+        self._metadata = value
+
+    @property
+    def is_shared(self) -> bool:
+        """Get the Boolean describing if this connection is shared amongst its cohort within a hub.
+        Only applicable for connections created within a project.
+
+        :rtype: bool
+        """
+        return self._is_shared
+
+    @is_shared.setter
+    def is_shared(self, value: bool) -> None:
+        """Assign the is_shared property of the connection, determining if it is shared amongst other projects
+        within its parent hub. Only applicable for connections created within a project.
+
+        :param value: The new is_shared value.
+        :type value: bool
+        """
+        if not value:
+            return
+        self._is_shared = value
+
+    def dump(self, dest: Union[str, PathLike, IO[AnyStr]], **kwargs: Any) -> None:
+        """Dump the connection spec into a file in yaml format.
+
+        :param dest: The destination to receive this connection's spec.
+            Must be either a path to a local file, or an already-open file stream.
+            If dest is a file path, a new file will be created,
+            and an exception is raised if the file exists.
+            If dest is an open file, the file will be written to directly,
+            and an exception will be raised if the file is not writable.
+        :type dest: Union[PathLike, str, IO[AnyStr]]
+        """
+        path = kwargs.pop("path", None)
+        yaml_serialized = self._to_dict()
+        dump_yaml_to_file(dest, yaml_serialized, default_flow_style=False, path=path, **kwargs)
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "WorkspaceConnection":
+        data = data or {}
+        params_override = params_override or []
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        return cls._load_from_dict(data=data, context=context, **kwargs)
+
+    @classmethod
+    def _load_from_dict(cls, data: Dict, context: Dict, **kwargs: Any) -> "WorkspaceConnection":
+        conn_type = data["type"] if "type" in data else None
+        schema_class = cls._get_schema_class_from_type(conn_type)
+        loaded_data: WorkspaceConnection = load_from_dict(schema_class, data, context, **kwargs)
+        return loaded_data
+
+    def _to_dict(self) -> Dict:
+        schema_class = WorkspaceConnection._get_schema_class_from_type(self.type)
+        # Not sure what this pylint complaint was about, probably due to the polymorphic
+        # tricks at play. Disabling since testing indicates no issue.
+        res: dict = schema_class(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
+        return res
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestWorkspaceConnection) -> "WorkspaceConnection":
+        conn_class = cls._get_entity_class_from_rest_obj(rest_obj)
+
+        popped_metadata = conn_class._get_required_metadata_fields()
+
+        rest_kwargs = cls._extract_kwargs_from_rest_obj(rest_obj=rest_obj, popped_metadata=popped_metadata)
+        # Check for alternative name for custom connection type (added for client clarity).
+        if rest_kwargs["type"].lower() == camel_to_snake(ConnectionCategory.CUSTOM_KEYS).lower():
+            rest_kwargs["type"] = ConnectionTypes.CUSTOM
+        if rest_kwargs["type"].lower() == camel_to_snake(ConnectionCategory.ADLS_GEN2).lower():
+            rest_kwargs["type"] = ConnectionTypes.AZURE_DATA_LAKE_GEN_2
+        target = rest_kwargs.get("target", "")
+        # This dumb code accomplishes 2 things.
+        # It ensures that sub-classes properly input their target, regardless of which
+        # arbitrary name they replace it with, while also still allowing our official
+        # client specs to list those inputs as 'required'
+        for target_name in CONNECTION_ALTERNATE_TARGET_NAMES:
+            rest_kwargs[target_name] = target
+        if rest_obj.properties.category == ConnectionCategory.AZURE_ONE_LAKE:
+            # The microsoft one lake connection uniquely has client-only inputs
+            # that aren't just an alternate name for the target.
+            # This sets those inputs, that way the initializer can still
+            # require those fields for users.
+            rest_kwargs["artifact"] = ""
+            rest_kwargs["one_lake_workspace_name"] = ""
+        if rest_obj.properties.category == ConnectionTypes.AI_SERVICES_REST_PLACEHOLDER:
+            # AI Services renames its metadata field when surfaced to users and passed
+            # into its initializer for clarity. ResourceId doesn't really tell much on its own.
+            # No default in pop, this should fail if we somehow don't get a resource ID
+            rest_kwargs["ai_services_resource_id"] = rest_kwargs.pop(camel_to_snake(CONNECTION_RESOURCE_ID_KEY))
+        connection = conn_class(**rest_kwargs)
+        return cast(WorkspaceConnection, connection)
+
+    def _validate(self) -> str:
+        return str(self.name)
+
+    def _to_rest_object(self) -> RestWorkspaceConnection:
+        connection_properties_class: Any = NoneAuthTypeWorkspaceConnectionProperties
+        if self._credentials:
+            connection_properties_class = self._credentials._get_rest_properties_class()
+        # Convert from human readable type to corresponding api enum if needed.
+        conn_type = self.type
+        if conn_type == ConnectionTypes.CUSTOM:
+            conn_type = ConnectionCategory.CUSTOM_KEYS
+        elif conn_type == ConnectionTypes.AZURE_DATA_LAKE_GEN_2:
+            conn_type = ConnectionCategory.ADLS_GEN2
+        elif conn_type in {
+            ConnectionTypes.AZURE_CONTENT_SAFETY,
+            ConnectionTypes.AZURE_SPEECH_SERVICES,
+        }:
+            conn_type = ConnectionCategory.COGNITIVE_SERVICE
+        elif conn_type == ConnectionTypes.AZURE_SEARCH:
+            conn_type = ConnectionCategory.COGNITIVE_SEARCH
+        elif conn_type == ConnectionTypes.AZURE_AI_SERVICES:
+            # ConnectionCategory.AI_SERVICES category accidentally unpublished
+            conn_type = ConnectionTypes.AI_SERVICES_REST_PLACEHOLDER
+        # Some credential property bags have no credential input.
+        if connection_properties_class in {
+            NoneAuthTypeWorkspaceConnectionProperties,
+            AADAuthTypeWorkspaceConnectionProperties,
+        }:
+            properties = connection_properties_class(
+                target=self.target,
+                metadata=self.metadata,
+                category=_snake_to_camel(conn_type),
+                is_shared_to_all=self.is_shared,
+            )
+        else:
+            properties = connection_properties_class(
+                target=self.target,
+                credentials=self.credentials._to_workspace_connection_rest_object() if self._credentials else None,
+                metadata=self.metadata,
+                category=_snake_to_camel(conn_type),
+                is_shared_to_all=self.is_shared,
+            )
+
+        return RestWorkspaceConnection(properties=properties)
+
+    @classmethod
+    def _extract_kwargs_from_rest_obj(
+        cls, rest_obj: RestWorkspaceConnection, popped_metadata: List[str]
+    ) -> Dict[str, str]:
+        """Internal helper function with extracts all the fields needed to initialize a connection object
+        from its associated restful object. Pulls extra fields based on the supplied `popped_metadata` input.
+        Returns all the fields as a dictionary, which is expected to then be supplied to a
+        connection initializer as kwargs.
+
+        :param rest_obj: The rest object representation of a connection
+        :type rest_obj: RestWorkspaceConnection
+        :param popped_metadata: Key names that should be pulled from the rest object's metadata and
+            injected as top-level fields into the client connection's initializer.
+            This is needed for subclasses that require extra inputs compared to the base Connection class.
+        :type popped_metadata: List[str]
+
+        :return: A dictionary containing all kwargs needed to construct a connection.
+        :rtype: Dict[str, str]
+        """
+        properties = rest_obj.properties
+        credentials: Any = NoneCredentialConfiguration()
+
+        credentials_class = _BaseIdentityConfiguration._get_credential_class_from_rest_type(properties.auth_type)
+        # None and AAD auth types have a property bag class, but no credentials inside that.
+        # Thankfully they both have no inputs.
+
+        if credentials_class is AadCredentialConfiguration:
+            credentials = AadCredentialConfiguration()
+        elif credentials_class is not NoneCredentialConfiguration:
+            credentials = credentials_class._from_workspace_connection_rest_object(properties.credentials)
+
+        metadata = properties.metadata if hasattr(properties, "metadata") else {}
+        rest_kwargs = {
+            "id": rest_obj.id,
+            "name": rest_obj.name,
+            "target": properties.target,
+            "creation_context": SystemData._from_rest_object(rest_obj.system_data) if rest_obj.system_data else None,
+            "type": camel_to_snake(properties.category),
+            "credentials": credentials,
+            "metadata": metadata,
+            "is_shared": properties.is_shared_to_all if hasattr(properties, "is_shared_to_all") else True,
+        }
+
+        for name in popped_metadata:
+            if name in metadata:
+                rest_kwargs[camel_to_snake(name)] = metadata[name]
+        return rest_kwargs
+
+    @classmethod
+    def _get_entity_class_from_type(cls, type: str) -> Type:
+        """Helper function that derives the correct connection class given the client or server type.
+        Differs slightly from the rest object version in that it doesn't need to account for
+        rest object metadata.
+
+        The reason there are two functions at all is due to certain API connection types that
+        are obfuscated with different names when presented to the client. These types are
+        accounted for in the ConnectionTypes class in the constants file.
+
+        :param type: The type string describing the connection.
+        :type type: str
+
+        :return: The connection class the conn_type corresponds to.
+        :rtype: Type
+        """
+        from .connection_subtypes import (
+            AzureBlobStoreConnection,
+            MicrosoftOneLakeConnection,
+            AzureOpenAIConnection,
+            AzureAIServicesConnection,
+            AzureAISearchConnection,
+            AzureContentSafetyConnection,
+            AzureSpeechServicesConnection,
+            APIKeyConnection,
+            OpenAIConnection,
+            SerpConnection,
+            ServerlessConnection,
+        )
+
+        conn_type = _snake_to_camel(type).lower()
+        if conn_type is None:
+            return WorkspaceConnection
+
+        # Connection categories don't follow perfect camel casing, so lower
+        # case everything to avoid problems.
+        CONNECTION_CATEGORY_TO_SUBCLASS_MAP = {
+            ConnectionCategory.AZURE_OPEN_AI.lower(): AzureOpenAIConnection,
+            ConnectionCategory.AZURE_BLOB.lower(): AzureBlobStoreConnection,
+            ConnectionCategory.AZURE_ONE_LAKE.lower(): MicrosoftOneLakeConnection,
+            ConnectionCategory.API_KEY.lower(): APIKeyConnection,
+            ConnectionCategory.OPEN_AI.lower(): OpenAIConnection,
+            ConnectionCategory.SERP.lower(): SerpConnection,
+            ConnectionCategory.SERVERLESS.lower(): ServerlessConnection,
+            _snake_to_camel(ConnectionTypes.AZURE_CONTENT_SAFETY).lower(): AzureContentSafetyConnection,
+            _snake_to_camel(ConnectionTypes.AZURE_SPEECH_SERVICES).lower(): AzureSpeechServicesConnection,
+            ConnectionCategory.COGNITIVE_SEARCH.lower(): AzureAISearchConnection,
+            _snake_to_camel(ConnectionTypes.AZURE_SEARCH).lower(): AzureAISearchConnection,
+            _snake_to_camel(ConnectionTypes.AZURE_AI_SERVICES).lower(): AzureAIServicesConnection,
+            ConnectionTypes.AI_SERVICES_REST_PLACEHOLDER.lower(): AzureAIServicesConnection,
+        }
+        return CONNECTION_CATEGORY_TO_SUBCLASS_MAP.get(conn_type, WorkspaceConnection)
+
+    @classmethod
+    def _get_entity_class_from_rest_obj(cls, rest_obj: RestWorkspaceConnection) -> Type:
+        """Helper function that converts a restful connection into the associated
+        connection class or subclass. Accounts for potential snake/camel case and
+        capitalization differences in the type, and sub-typing derived from metadata.
+
+        :param rest_obj: The rest object representation of the connection to derive a class from.
+        :type rest_obj: RestWorkspaceConnection
+
+        :return: The connection class the conn_type corresponds to.
+        :rtype: Type
+        """
+        conn_type = rest_obj.properties.category
+        conn_type = _snake_to_camel(conn_type).lower()
+        if conn_type is None:
+            return WorkspaceConnection
+
+        # Imports are done here to avoid circular imports on load.
+        from .connection_subtypes import (
+            AzureContentSafetyConnection,
+            AzureSpeechServicesConnection,
+        )
+
+        # Cognitive search connections have further subdivisions based on the kind of service.
+        if (
+            conn_type == ConnectionCategory.COGNITIVE_SERVICE.lower()
+            and hasattr(rest_obj.properties, "metadata")
+            and rest_obj.properties.metadata is not None
+        ):
+            kind = rest_obj.properties.metadata.get(CONNECTION_KIND_KEY, "").lower()
+            if kind == CognitiveServiceKinds.CONTENT_SAFETY.lower():
+                return AzureContentSafetyConnection
+            if kind == CognitiveServiceKinds.SPEECH.lower():
+                return AzureSpeechServicesConnection
+            return WorkspaceConnection
+
+        return cls._get_entity_class_from_type(type=conn_type)
+
+    @classmethod
+    def _get_schema_class_from_type(cls, conn_type: Optional[str]) -> Type:
+        """Helper function that converts a rest client connection category into the associated
+        connection schema class or subclass. Accounts for potential snake/camel case and
+        capitalization differences.
+
+        :param conn_type: The connection type.
+        :type conn_type: str
+
+        :return: The connection schema class the conn_type corresponds to.
+        :rtype: Type
+        """
+        if conn_type is None:
+            return WorkspaceConnectionSchema
+        entity_class = cls._get_entity_class_from_type(conn_type)
+        return entity_class._get_schema_class()
+
+    @classmethod
+    def _get_required_metadata_fields(cls) -> List[str]:
+        """Helper function that returns the required metadata fields for specific
+        connection type. This parent function returns nothing, but needs to be overwritten by child
+        classes, which are created under the expectation that they have extra fields that need to be
+        accounted for.
+
+        :return: A list of the required metadata fields for the specific connection type.
+        :rtype: List[str]
+        """
+        return []
+
+    @classmethod
+    def _get_schema_class(cls) -> Type:
+        """Helper function that maps this class to its associated schema class. Needs to be overridden by
+        child classes to allow the base class to be polymorphic in its schema reading.
+
+        :return: The appropriate schema class to use with this entity class.
+        :rtype: Type
+        """
+        return WorkspaceConnectionSchema
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/customer_managed_key.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/customer_managed_key.py
new file mode 100644
index 00000000..88474dab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/customer_managed_key.py
@@ -0,0 +1,48 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
+from typing import Optional
+
+
+class CustomerManagedKey:
+    """Key vault details for encrypting data with customer-managed keys.
+
+    :param key_vault: Key vault that is holding the customer-managed key.
+    :type key_vault: str
+    :param key_uri: URI for the customer-managed key.
+    :type key_uri: str
+    :param cosmosdb_id: ARM id of bring-your-own cosmosdb account that customer brings
+        to store customer's data with encryption.
+    :type cosmosdb_id: str
+    :param storage_id: ARM id of bring-your-own storage account that customer brings
+        to store customer's data with encryption.
+    :type storage_id: str
+    :param search_id: ARM id of bring-your-own search account that customer brings
+        to store customer's data with encryption.
+    :type search_id: str
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_workspace.py
+            :start-after: [START customermanagedkey]
+            :end-before: [END customermanagedkey]
+            :language: python
+            :dedent: 8
+            :caption: Creating a CustomerManagedKey object.
+    """
+
+    def __init__(
+        self,
+        key_vault: Optional[str] = None,
+        key_uri: Optional[str] = None,
+        cosmosdb_id: Optional[str] = None,
+        storage_id: Optional[str] = None,
+        search_id: Optional[str] = None,
+    ):
+        self.key_vault = key_vault
+        self.key_uri = key_uri
+        self.cosmosdb_id = cosmosdb_id or ""
+        self.storage_id = storage_id or ""
+        self.search_id = search_id or ""
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/diagnose.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/diagnose.py
new file mode 100644
index 00000000..fa923dc4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/diagnose.py
@@ -0,0 +1,214 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import json
+from typing import Any, Dict, List, Optional
+
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    DiagnoseRequestProperties as RestDiagnoseRequestProperties,
+)
+from azure.ai.ml._restclient.v2024_10_01_preview.models import DiagnoseResponseResult as RestDiagnoseResponseResult
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    DiagnoseResponseResultValue as RestDiagnoseResponseResultValue,
+)
+from azure.ai.ml._restclient.v2024_10_01_preview.models import DiagnoseResult as RestDiagnoseResult
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    DiagnoseWorkspaceParameters as RestDiagnoseWorkspaceParameters,
+)
+
+
+class DiagnoseRequestProperties:
+    """DiagnoseRequestProperties."""
+
+    def __init__(
+        self,
+        *,
+        udr: Optional[Dict[str, Any]] = None,
+        nsg: Optional[Dict[str, Any]] = None,
+        resource_lock: Optional[Dict[str, Any]] = None,
+        dns_resolution: Optional[Dict[str, Any]] = None,
+        storage_account: Optional[Dict[str, Any]] = None,
+        key_vault: Optional[Dict[str, Any]] = None,
+        container_registry: Optional[Dict[str, Any]] = None,
+        application_insights: Optional[Dict[str, Any]] = None,
+        others: Optional[Dict[str, Any]] = None,
+    ):
+        self.udr = udr
+        self.nsg = nsg
+        self.resource_lock = resource_lock
+        self.dns_resolution = dns_resolution
+        self.storage_account = storage_account
+        self.key_vault = key_vault
+        self.container_registry = container_registry
+        self.application_insights = application_insights
+        self.others = others
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestDiagnoseRequestProperties) -> "DiagnoseRequestProperties":
+        return cls(
+            udr=rest_obj.udr,
+            nsg=rest_obj.nsg,
+            resource_lock=rest_obj.resource_lock,
+            dns_resolution=rest_obj.dns_resolution,
+            storage_account=rest_obj.storage_account,
+            key_vault=rest_obj.key_vault,
+            container_registry=rest_obj.container_registry,
+            application_insights=rest_obj.application_insights,
+            others=rest_obj.others,
+        )
+
+    def _to_rest_object(self) -> RestDiagnoseRequestProperties:
+        return RestDiagnoseRequestProperties(
+            udr=self.udr,
+            nsg=self.nsg,
+            resource_lock=self.resource_lock,
+            dns_resolution=self.dns_resolution,
+            storage_account=self.storage_account,
+            key_vault=self.key_vault,
+            container_registry=self.container_registry,
+            application_insights=self.application_insights,
+            others=self.others,
+        )
+
+
+class DiagnoseResponseResult:
+    """DiagnoseResponseResult."""
+
+    def __init__(
+        self,
+        *,
+        value: Optional["DiagnoseResponseResultValue"] = None,
+    ):
+        self.value = value
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestDiagnoseResponseResult) -> "DiagnoseResponseResult":
+        val = None
+        if rest_obj and rest_obj.value and isinstance(rest_obj.value, RestDiagnoseResponseResultValue):
+            # pylint: disable=protected-access
+            val = DiagnoseResponseResultValue._from_rest_object(rest_obj.value)
+        return cls(value=val)
+
+    def _to_rest_object(self) -> RestDiagnoseResponseResult:
+        return RestDiagnoseResponseResult(value=self.value)
+
+
+class DiagnoseResponseResultValue:
+    """DiagnoseResponseResultValue."""
+
+    def __init__(
+        self,
+        *,
+        user_defined_route_results: Optional[List["DiagnoseResult"]] = None,
+        network_security_rule_results: Optional[List["DiagnoseResult"]] = None,
+        resource_lock_results: Optional[List["DiagnoseResult"]] = None,
+        dns_resolution_results: Optional[List["DiagnoseResult"]] = None,
+        storage_account_results: Optional[List["DiagnoseResult"]] = None,
+        key_vault_results: Optional[List["DiagnoseResult"]] = None,
+        container_registry_results: Optional[List["DiagnoseResult"]] = None,
+        application_insights_results: Optional[List["DiagnoseResult"]] = None,
+        other_results: Optional[List["DiagnoseResult"]] = None,
+    ):
+        self.user_defined_route_results = user_defined_route_results
+        self.network_security_rule_results = network_security_rule_results
+        self.resource_lock_results = resource_lock_results
+        self.dns_resolution_results = dns_resolution_results
+        self.storage_account_results = storage_account_results
+        self.key_vault_results = key_vault_results
+        self.container_registry_results = container_registry_results
+        self.application_insights_results = application_insights_results
+        self.other_results = other_results
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestDiagnoseResponseResultValue) -> "DiagnoseResponseResultValue":
+        return cls(
+            user_defined_route_results=rest_obj.user_defined_route_results,
+            network_security_rule_results=rest_obj.network_security_rule_results,
+            resource_lock_results=rest_obj.resource_lock_results,
+            dns_resolution_results=rest_obj.dns_resolution_results,
+            storage_account_results=rest_obj.storage_account_results,
+            key_vault_results=rest_obj.key_vault_results,
+            container_registry_results=rest_obj.container_registry_results,
+            application_insights_results=rest_obj.application_insights_results,
+            other_results=rest_obj.other_results,
+        )
+
+    def _to_rest_object(self) -> RestDiagnoseResponseResultValue:
+        return RestDiagnoseResponseResultValue(
+            user_defined_route_results=self.user_defined_route_results,
+            network_security_rule_results=self.network_security_rule_results,
+            resource_lock_results=self.resource_lock_results,
+            dns_resolution_results=self.dns_resolution_results,
+            storage_account_results=self.storage_account_results,
+            key_vault_results=self.key_vault_results,
+            container_registry_results=self.container_registry_results,
+            application_insights_results=self.application_insights_results,
+            other_results=self.other_results,
+        )
+
+    def __json__(self):
+        results = self.__dict__.copy()
+        for k, v in results.items():
+            results[k] = [item.__dict__ for item in v]
+        return results
+
+    def __str__(self) -> str:
+        return json.dumps(self, default=lambda o: o.__json__(), indent=2)
+
+
+class DiagnoseResult:
+    """Result of Diagnose."""
+
+    def __init__(
+        self,
+        *,
+        code: Optional[str] = None,
+        level: Optional[str] = None,
+        message: Optional[str] = None,
+    ):
+        self.code = code
+        self.level = level
+        self.message = message
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestDiagnoseResult) -> "DiagnoseResult":
+        return cls(
+            code=rest_obj.code,
+            level=rest_obj.level,
+            message=rest_obj.message,
+        )
+
+    def _to_rest_object(self) -> RestDiagnoseResult:
+        return RestDiagnoseResult(
+            code=self.code,
+            level=self.level,
+            message=self.message,
+        )
+
+
+class DiagnoseWorkspaceParameters:
+    """Parameters to diagnose a workspace."""
+
+    def __init__(
+        self,
+        *,
+        value: Optional["DiagnoseRequestProperties"] = None,
+    ):
+        self.value = value
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestDiagnoseWorkspaceParameters) -> "DiagnoseWorkspaceParameters":
+        val = None
+        if rest_obj.value and isinstance(rest_obj.value, DiagnoseRequestProperties):
+            # TODO: Bug Item number: 2883283
+            # pylint: disable=protected-access
+            val = rest_obj.value._from_rest_object()  # type: ignore
+        return cls(value=val)
+
+    def _to_rest_object(self) -> RestDiagnoseWorkspaceParameters:
+        val = None
+        if self.value and isinstance(self.value, DiagnoseRequestProperties):
+            # pylint: disable=protected-access
+            val = self.value._to_rest_object()
+        return RestDiagnoseWorkspaceParameters(value=val)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/feature_store_settings.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/feature_store_settings.py
new file mode 100644
index 00000000..8c264db0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/feature_store_settings.py
@@ -0,0 +1,61 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+from typing import Optional
+
+from azure.ai.ml._restclient.v2024_10_01_preview.models import FeatureStoreSettings as RestFeatureStoreSettings
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+
+from .compute_runtime import ComputeRuntime
+
+
+class FeatureStoreSettings(RestTranslatableMixin):
+    """Feature Store Settings
+
+    :param compute_runtime: The spark compute runtime settings. defaults to None.
+    :type compute_runtime: Optional[~compute_runtime.ComputeRuntime]
+    :param offline_store_connection_name: The offline store connection name. Defaults to None.
+    :type offline_store_connection_name: Optional[str]
+    :param online_store_connection_name: The online store connection name. Defaults to None.
+    :type online_store_connection_name: Optional[str]
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_featurestore.py
+            :start-after: [START configure_feature_store_settings]
+            :end-before: [END configure_feature_store_settings]
+            :language: python
+            :dedent: 8
+            :caption: Instantiating FeatureStoreSettings
+    """
+
+    def __init__(
+        self,
+        *,
+        compute_runtime: Optional[ComputeRuntime] = None,
+        offline_store_connection_name: Optional[str] = None,
+        online_store_connection_name: Optional[str] = None,
+    ) -> None:
+        self.compute_runtime = compute_runtime if compute_runtime else ComputeRuntime(spark_runtime_version="3.4.0")
+        self.offline_store_connection_name = offline_store_connection_name
+        self.online_store_connection_name = online_store_connection_name
+
+    def _to_rest_object(self) -> RestFeatureStoreSettings:
+        return RestFeatureStoreSettings(
+            compute_runtime=ComputeRuntime._to_rest_object(self.compute_runtime),
+            offline_store_connection_name=self.offline_store_connection_name,
+            online_store_connection_name=self.online_store_connection_name,
+        )
+
+    @classmethod
+    def _from_rest_object(cls, obj: RestFeatureStoreSettings) -> Optional["FeatureStoreSettings"]:
+        if not obj:
+            return None
+        return FeatureStoreSettings(
+            compute_runtime=ComputeRuntime._from_rest_object(obj.compute_runtime),
+            offline_store_connection_name=obj.offline_store_connection_name,
+            online_store_connection_name=obj.online_store_connection_name,
+        )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/network_acls.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/network_acls.py
new file mode 100644
index 00000000..fbb3b9ef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/network_acls.py
@@ -0,0 +1,90 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import List, Optional
+
+from azure.ai.ml._restclient.v2024_10_01_preview.models import IPRule as RestIPRule
+from azure.ai.ml._restclient.v2024_10_01_preview.models import NetworkAcls as RestNetworkAcls
+from azure.ai.ml.entities._mixins import RestTranslatableMixin
+
+
+class IPRule(RestTranslatableMixin):
+    """Represents an IP rule with a value.
+
+    :param value: An IPv4 address or range in CIDR notation.
+    :type value: str
+    """
+
+    def __init__(self, value: Optional[str]):
+        self.value = value
+
+    def __repr__(self):
+        return f"IPRule(value={self.value})"
+
+    def _to_rest_object(self) -> RestIPRule:
+        return RestIPRule(value=self.value)
+
+    @classmethod
+    def _from_rest_object(cls, obj: RestIPRule) -> "IPRule":
+        return cls(value=obj.value)
+
+
+class DefaultActionType:
+    """Specifies the default action when no IP rules are matched."""
+
+    DENY = "Deny"
+    ALLOW = "Allow"
+
+
+class NetworkAcls(RestTranslatableMixin):
+    """Network Access Setting for Workspace
+
+    :param default_action: Specifies the default action when no IP rules are matched.
+    :type default_action: str
+    :param ip_rules: Rules governing the accessibility of a resource from a specific IP address or IP range.
+    :type ip_rules: Optional[List[IPRule]]
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/ml_samples_workspace.py
+                :start-after: [START workspace_network_access_settings]
+                :end-before: [END workspace_network_access_settings]
+                :language: python
+                :dedent: 8
+                :caption: Configuring one of the three public network access settings.
+    """
+
+    def __init__(
+        self,
+        *,
+        default_action: str = DefaultActionType.ALLOW,
+        ip_rules: Optional[List[IPRule]] = None,
+    ):
+        self.default_action = default_action
+        self.ip_rules = ip_rules if ip_rules is not None else []
+
+    def __repr__(self):
+        ip_rules_repr = ", ".join(repr(ip_rule) for ip_rule in self.ip_rules)
+        return f"NetworkAcls(default_action={self.default_action}, ip_rules=[{ip_rules_repr}])"
+
+    def _to_rest_object(self) -> RestNetworkAcls:
+        return RestNetworkAcls(
+            default_action=self.default_action,
+            ip_rules=(
+                [ip_rule._to_rest_object() for ip_rule in self.ip_rules]  # pylint: disable=protected-access
+                if self.ip_rules
+                else None
+            ),
+        )
+
+    @classmethod
+    def _from_rest_object(cls, obj: RestNetworkAcls) -> "NetworkAcls":
+        return cls(
+            default_action=obj.default_action,
+            ip_rules=(
+                [IPRule._from_rest_object(ip_rule) for ip_rule in obj.ip_rules]  # pylint: disable=protected-access
+                if obj.ip_rules
+                else []
+            ),
+        )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/networking.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/networking.py
new file mode 100644
index 00000000..4576eac9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/networking.py
@@ -0,0 +1,348 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from abc import ABC
+from typing import Any, Dict, List, Optional
+
+from azure.ai.ml._restclient.v2024_10_01_preview.models import FqdnOutboundRule as RestFqdnOutboundRule
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    ManagedNetworkProvisionStatus as RestManagedNetworkProvisionStatus,
+)
+from azure.ai.ml._restclient.v2024_10_01_preview.models import ManagedNetworkSettings as RestManagedNetwork
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    PrivateEndpointDestination as RestPrivateEndpointOutboundRuleDestination,
+)
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    PrivateEndpointOutboundRule as RestPrivateEndpointOutboundRule,
+)
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    ServiceTagDestination as RestServiceTagOutboundRuleDestination,
+)
+from azure.ai.ml._restclient.v2024_10_01_preview.models import ServiceTagOutboundRule as RestServiceTagOutboundRule
+from azure.ai.ml.constants._workspace import IsolationMode, OutboundRuleCategory, OutboundRuleType
+
+
+class OutboundRule(ABC):
+    """Base class for Outbound Rules, cannot be instantiated directly. Please see FqdnDestination,
+    PrivateEndpointDestination, and ServiceTagDestination objects to create outbound rules.
+
+    :param name: Name of the outbound rule.
+    :type name: str
+    :param type: Type of the outbound rule. Supported types are "FQDN", "PrivateEndpoint", "ServiceTag"
+    :type type: str
+    :ivar type: Type of the outbound rule. Supported types are "FQDN", "PrivateEndpoint", "ServiceTag"
+    :vartype type: str
+    """
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
+        self.name = name
+        self.parent_rule_names = kwargs.pop("parent_rule_names", None)
+        self.type = kwargs.pop("type", None)
+        self.category = kwargs.pop("category", OutboundRuleCategory.USER_DEFINED)
+        self.status = kwargs.pop("status", None)
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: Any, name: str) -> Optional["OutboundRule"]:
+        if isinstance(rest_obj, RestFqdnOutboundRule):
+            rule_fqdnDestination = FqdnDestination(destination=rest_obj.destination, name=name)
+            rule_fqdnDestination.category = rest_obj.category
+            rule_fqdnDestination.status = rest_obj.status
+            return rule_fqdnDestination
+        if isinstance(rest_obj, RestPrivateEndpointOutboundRule):
+            rule_privateEndpointDestination = PrivateEndpointDestination(
+                service_resource_id=rest_obj.destination.service_resource_id,
+                subresource_target=rest_obj.destination.subresource_target,
+                spark_enabled=rest_obj.destination.spark_enabled,
+                fqdns=rest_obj.fqdns,
+                name=name,
+            )
+            rule_privateEndpointDestination.category = rest_obj.category
+            rule_privateEndpointDestination.status = rest_obj.status
+            return rule_privateEndpointDestination
+        if isinstance(rest_obj, RestServiceTagOutboundRule):
+            rule = ServiceTagDestination(
+                service_tag=rest_obj.destination.service_tag,
+                protocol=rest_obj.destination.protocol,
+                port_ranges=rest_obj.destination.port_ranges,
+                address_prefixes=rest_obj.destination.address_prefixes,
+                name=name,
+            )
+            rule.category = rest_obj.category
+            rule.status = rest_obj.status
+            return rule
+
+        return None
+
+
+class FqdnDestination(OutboundRule):
+    """Class representing a FQDN outbound rule.
+
+    :param name: Name of the outbound rule.
+    :type name: str
+    :param destination: Fully qualified domain name to which outbound connections are allowed.
+        For example: “xxxxxx.contoso.com”.
+    :type destination: str
+    :ivar type: Type of the outbound rule. Set to "FQDN" for this class.
+    :vartype type: str
+
+    .. literalinclude:: ../samples/ml_samples_workspace.py
+            :start-after: [START fqdn_outboundrule]
+            :end-before: [END fqdn_outboundrule]
+            :language: python
+            :dedent: 8
+            :caption: Creating a FqdnDestination outbound rule object.
+    """
+
+    def __init__(self, *, name: str, destination: str, **kwargs: Any) -> None:
+        self.destination = destination
+        OutboundRule.__init__(self, type=OutboundRuleType.FQDN, name=name, **kwargs)
+
+    def _to_rest_object(self) -> RestFqdnOutboundRule:
+        return RestFqdnOutboundRule(type=self.type, category=self.category, destination=self.destination)
+
+    def _to_dict(self) -> Dict:
+        return {
+            "name": self.name,
+            "type": OutboundRuleType.FQDN,
+            "category": self.category,
+            "destination": self.destination,
+            "status": self.status,
+        }
+
+
+class PrivateEndpointDestination(OutboundRule):
+    """Class representing a Private Endpoint outbound rule.
+
+    :param name: Name of the outbound rule.
+    :type name: str
+    :param service_resource_id: The resource URI of the root service that supports creation of the private link.
+    :type service_resource_id: str
+    :param subresource_target: The target endpoint of the subresource of the service.
+    :type subresource_target: str
+    :param spark_enabled: Indicates if the private endpoint can be used for Spark jobs, default is “false”.
+    :type spark_enabled: bool
+    :param fqdns: String list of FQDNs particular to the Private Endpoint resource creation. For application
+        gateway Private Endpoints, this is the FQDN which will resolve to the private IP of the application
+        gateway PE inside the workspace's managed network.
+    :type fqdns: List[str]
+    :ivar type: Type of the outbound rule. Set to "PrivateEndpoint" for this class.
+    :vartype type: str
+
+    .. literalinclude:: ../samples/ml_samples_workspace.py
+            :start-after: [START private_endpoint_outboundrule]
+            :end-before: [END private_endpoint_outboundrule]
+            :language: python
+            :dedent: 8
+            :caption: Creating a PrivateEndpointDestination outbound rule object.
+    """
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        service_resource_id: str,
+        subresource_target: str,
+        spark_enabled: bool = False,
+        fqdns: Optional[List[str]] = None,
+        **kwargs: Any,
+    ) -> None:
+        self.service_resource_id = service_resource_id
+        self.subresource_target = subresource_target
+        self.spark_enabled = spark_enabled
+        self.fqdns = fqdns
+        OutboundRule.__init__(self, type=OutboundRuleType.PRIVATE_ENDPOINT, name=name, **kwargs)
+
+    def _to_rest_object(self) -> RestPrivateEndpointOutboundRule:
+        return RestPrivateEndpointOutboundRule(
+            type=self.type,
+            category=self.category,
+            destination=RestPrivateEndpointOutboundRuleDestination(
+                service_resource_id=self.service_resource_id,
+                subresource_target=self.subresource_target,
+                spark_enabled=self.spark_enabled,
+            ),
+            fqdns=self.fqdns,
+        )
+
+    def _to_dict(self) -> Dict:
+        return {
+            "name": self.name,
+            "type": OutboundRuleType.PRIVATE_ENDPOINT,
+            "category": self.category,
+            "destination": {
+                "service_resource_id": self.service_resource_id,
+                "subresource_target": self.subresource_target,
+                "spark_enabled": self.spark_enabled,
+            },
+            "fqdns": self.fqdns,
+            "status": self.status,
+        }
+
+
+class ServiceTagDestination(OutboundRule):
+    """Class representing a Service Tag outbound rule.
+
+    :param name: Name of the outbound rule.
+    :type name: str
+    :param service_tag: Service Tag of an Azure service, maps to predefined IP addresses for its service endpoints.
+    :type service_tag: str
+    :param protocol: Allowed transport protocol, can be "TCP", "UDP", "ICMP" or "*" for all supported protocols.
+    :type protocol: str
+    :param port_ranges: A comma-separated list of single ports and/or range of ports, such as "80,1024-65535".
+        Traffics should be allowed to these port ranges.
+    :type port_ranges: str
+    :param address_prefixes: Optional list of CIDR prefixes or IP ranges, when provided, service_tag argument will
+        be ignored and address_prefixes will be used instead.
+    :type address_prefixes: List[str]
+    :ivar type: Type of the outbound rule. Set to "ServiceTag" for this class.
+    :vartype type: str
+
+    .. literalinclude:: ../samples/ml_samples_workspace.py
+            :start-after: [START service_tag_outboundrule]
+            :end-before: [END service_tag_outboundrule]
+            :language: python
+            :dedent: 8
+            :caption: Creating a ServiceTagDestination outbound rule object.
+    """
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        protocol: str,
+        port_ranges: str,
+        service_tag: Optional[str] = None,
+        address_prefixes: Optional[List[str]] = None,
+        **kwargs: Any,
+    ) -> None:
+        self.service_tag = service_tag
+        self.protocol = protocol
+        self.port_ranges = port_ranges
+        self.address_prefixes = address_prefixes
+        OutboundRule.__init__(self, type=OutboundRuleType.SERVICE_TAG, name=name, **kwargs)
+
+    def _to_rest_object(self) -> RestServiceTagOutboundRule:
+        return RestServiceTagOutboundRule(
+            type=self.type,
+            category=self.category,
+            destination=RestServiceTagOutboundRuleDestination(
+                service_tag=self.service_tag,
+                protocol=self.protocol,
+                port_ranges=self.port_ranges,
+                address_prefixes=self.address_prefixes,
+            ),
+        )
+
+    def _to_dict(self) -> Dict:
+        return {
+            "name": self.name,
+            "type": OutboundRuleType.SERVICE_TAG,
+            "category": self.category,
+            "destination": {
+                "service_tag": self.service_tag,
+                "protocol": self.protocol,
+                "port_ranges": self.port_ranges,
+                "address_prefixes": self.address_prefixes,
+            },
+            "status": self.status,
+        }
+
+
+class ManagedNetwork:
+    """Managed Network settings for a workspace.
+
+    :param isolation_mode: Isolation of the managed network, defaults to Disabled.
+    :type isolation_mode: str
+    :param firewall_sku: Firewall Sku for FQDN rules in AllowOnlyApprovedOutbound..
+    :type firewall_sku: str
+    :param outbound_rules: List of outbound rules for the managed network.
+    :type outbound_rules: List[~azure.ai.ml.entities.OutboundRule]
+    :param network_id: Network id for the managed network, not meant to be set by user.
+    :type network_id: str
+
+    .. literalinclude:: ../samples/ml_samples_workspace.py
+            :start-after: [START workspace_managed_network]
+            :end-before: [END workspace_managed_network]
+            :language: python
+            :dedent: 8
+            :caption: Creating a ManagedNetwork object with one of each rule type.
+    """
+
+    def __init__(
+        self,
+        *,
+        isolation_mode: str = IsolationMode.DISABLED,
+        outbound_rules: Optional[List[OutboundRule]] = None,
+        firewall_sku: Optional[str] = None,
+        network_id: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
+        self.isolation_mode = isolation_mode
+        self.firewall_sku = firewall_sku
+        self.network_id = network_id
+        self.outbound_rules = outbound_rules
+        self.status = kwargs.pop("status", None)
+
+    def _to_rest_object(self) -> RestManagedNetwork:
+        rest_outbound_rules = (
+            {
+                # pylint: disable=protected-access
+                outbound_rule.name: outbound_rule._to_rest_object()  # type: ignore[attr-defined]
+                for outbound_rule in self.outbound_rules
+            }
+            if self.outbound_rules
+            else {}
+        )
+        return RestManagedNetwork(
+            isolation_mode=self.isolation_mode, outbound_rules=rest_outbound_rules, firewall_sku=self.firewall_sku
+        )
+
+    @classmethod
+    def _from_rest_object(cls, obj: RestManagedNetwork) -> "ManagedNetwork":
+        from_rest_outbound_rules = (
+            [
+                OutboundRule._from_rest_object(obj.outbound_rules[name], name=name)  # pylint: disable=protected-access
+                for name in obj.outbound_rules
+            ]
+            if obj.outbound_rules
+            else {}
+        )
+        return ManagedNetwork(
+            isolation_mode=obj.isolation_mode,
+            outbound_rules=from_rest_outbound_rules,  # type: ignore[arg-type]
+            network_id=obj.network_id,
+            status=obj.status,
+            firewall_sku=obj.firewall_sku,
+        )
+
+
+class ManagedNetworkProvisionStatus:
+    """ManagedNetworkProvisionStatus.
+
+    :param status: Status for managed network provision.
+    :type status: str
+    :param spark_ready: Bool value indicating if managed network is spark ready
+    :type spark_ready: bool
+    """
+
+    def __init__(
+        self,
+        *,
+        status: Optional[str] = None,
+        spark_ready: Optional[bool] = None,
+    ):
+        self.status = status
+        self.spark_ready = spark_ready
+
+    @classmethod
+    def _from_rest_object(cls, rest_obj: RestManagedNetworkProvisionStatus) -> "ManagedNetworkProvisionStatus":
+        return cls(
+            status=rest_obj.status,
+            spark_ready=rest_obj.spark_ready,
+        )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/private_endpoint.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/private_endpoint.py
new file mode 100644
index 00000000..c9e8882e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/private_endpoint.py
@@ -0,0 +1,53 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import Dict, Optional
+
+
class EndpointConnection:
    """A private endpoint connection attached to a workspace private endpoint.

    :param subscription_id: Subscription id of the connection.
    :type subscription_id: str
    :param resource_group: Resource group of the connection.
    :type resource_group: str
    :param vnet_name: Name of the virtual network of the connection.
    :type vnet_name: str
    :param subnet_name: Name of the subnet of the connection.
    :type subnet_name: str
    :param location: Location of the connection.
    :type location: str
    """

    def __init__(
        self,
        subscription_id: str,
        resource_group: str,
        vnet_name: str,
        subnet_name: str,
        location: Optional[str] = None,
    ):
        # Plain data holder: every constructor argument becomes a public attribute.
        self.subscription_id = subscription_id
        self.resource_group = resource_group
        self.vnet_name = vnet_name
        self.subnet_name = subnet_name
        self.location = location
+
+
class PrivateEndpoint:
    """Private Endpoint of a workspace.

    :param approval_type: Approval type of the private endpoint.
    :type approval_type: str
    :param connections: List of private endpoint connections.
    :type connections: List[~azure.ai.ml.entities.EndpointConnection]
    """

    def __init__(
        self,
        approval_type: Optional[str] = None,
        connections: Optional[Dict[str, EndpointConnection]] = None,
    ):
        # Simple container; both fields are optional and default to None.
        self.connections = connections
        self.approval_type = approval_type
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/serverless_compute.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/serverless_compute.py
new file mode 100644
index 00000000..b78ede06
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/serverless_compute.py
@@ -0,0 +1,52 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from typing import Optional, Union
+
+from marshmallow.exceptions import ValidationError
+
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    ServerlessComputeSettings as RestServerlessComputeSettings,
+)
+from azure.ai.ml._schema._utils.utils import ArmId
+
+
class ServerlessComputeSettings:
    # Declared attribute types for mypy; custom_subnet is normalized to ArmId (or None).
    custom_subnet: Optional[ArmId]
    no_public_ip: bool = False

    def __init__(self, *, custom_subnet: Optional[Union[str, ArmId]] = None, no_public_ip: bool = False) -> None:
        """Settings regarding serverless compute(s) in an Azure ML workspace.

        :keyword custom_subnet: The ARM ID of the subnet to use for serverless compute(s).
        :paramtype custom_subnet: Optional[Union[str, ArmId]]
        :keyword no_public_ip: Whether or not to disable public IP addresses for serverless compute(s).
            Defaults to False.
        :paramtype no_public_ip: bool
        :raises ValidationError: If the custom_subnet is not formatted as an ARM ID.
        """
        # Accept None / ArmId as-is; coerce a plain string into ArmId
        # (which validates the ARM-ID format); reject anything else.
        if custom_subnet is None or isinstance(custom_subnet, ArmId):
            self.custom_subnet = custom_subnet
        elif isinstance(custom_subnet, str):
            self.custom_subnet = ArmId(custom_subnet)
        else:
            raise ValidationError("custom_subnet must be a string, ArmId, or None.")
        self.no_public_ip = no_public_ip

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, ServerlessComputeSettings):
            return NotImplemented
        return (self.custom_subnet, self.no_public_ip) == (other.custom_subnet, other.no_public_ip)

    def _to_rest_object(self) -> RestServerlessComputeSettings:
        """Convert to the REST-layer settings model."""
        return RestServerlessComputeSettings(
            serverless_compute_custom_subnet=self.custom_subnet,
            serverless_compute_no_public_ip=self.no_public_ip,
        )

    @classmethod
    def _from_rest_object(cls, obj: RestServerlessComputeSettings) -> "ServerlessComputeSettings":
        """Build the SDK settings object from its REST counterpart."""
        return cls(
            custom_subnet=obj.serverless_compute_custom_subnet,
            no_public_ip=obj.serverless_compute_no_public_ip,
        )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/workspace.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/workspace.py
new file mode 100644
index 00000000..495e00b0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/workspace.py
@@ -0,0 +1,491 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=too-many-instance-attributes
+
+from os import PathLike
+from pathlib import Path
+from typing import IO, Any, AnyStr, Dict, List, Optional, Tuple, Type, Union
+
+from azure.ai.ml._restclient.v2024_10_01_preview.models import FeatureStoreSettings as RestFeatureStoreSettings
+from azure.ai.ml._restclient.v2024_10_01_preview.models import ManagedNetworkSettings as RestManagedNetwork
+from azure.ai.ml._restclient.v2024_10_01_preview.models import ManagedServiceIdentity as RestManagedServiceIdentity
+from azure.ai.ml._restclient.v2024_10_01_preview.models import NetworkAcls as RestNetworkAcls
+from azure.ai.ml._restclient.v2024_10_01_preview.models import (
+    ServerlessComputeSettings as RestServerlessComputeSettings,
+)
+from azure.ai.ml._restclient.v2024_10_01_preview.models import Workspace as RestWorkspace
+from azure.ai.ml._schema.workspace.workspace import WorkspaceSchema
+from azure.ai.ml._utils.utils import dump_yaml_to_file
+from azure.ai.ml.constants._common import (
+    BASE_PATH_CONTEXT_KEY,
+    PARAMS_OVERRIDE_KEY,
+    CommonYamlFields,
+    WorkspaceKind,
+    WorkspaceResourceConstants,
+)
+from azure.ai.ml.entities._credentials import IdentityConfiguration
+from azure.ai.ml.entities._resource import Resource
+from azure.ai.ml.entities._util import find_field_in_override, load_from_dict
+from azure.ai.ml.entities._workspace.serverless_compute import ServerlessComputeSettings
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationErrorType, ValidationException
+
+from .customer_managed_key import CustomerManagedKey
+from .feature_store_settings import FeatureStoreSettings
+from .network_acls import NetworkAcls
+from .networking import ManagedNetwork
+
+
class Workspace(Resource):
    """Azure ML workspace.

    :param name: Name of the workspace.
    :type name: str
    :param description: Description of the workspace.
    :type description: str
    :param tags: Tags of the workspace.
    :type tags: dict
    :param display_name: Display name for the workspace. This is non-unique within the resource group.
    :type display_name: str
    :param location: The location to create the workspace in.
        If not specified, the same location as the resource group will be used.
    :type location: str
    :param resource_group: Name of resource group to create the workspace in.
    :type resource_group: str
    :param hbi_workspace: Whether the customer data is of high business impact (HBI),
        containing sensitive business information.
        For more information, see
        https://learn.microsoft.com/azure/machine-learning/concept-data-encryption#encryption-at-rest.
    :type hbi_workspace: bool
    :param storage_account: The resource ID of an existing storage account to use instead of creating a new one.
    :type storage_account: str
    :param container_registry: The resource ID of an existing container registry
        to use instead of creating a new one.
    :type container_registry: str
    :param key_vault: The resource ID of an existing key vault to use instead of creating a new one.
    :type key_vault: str
    :param application_insights: The resource ID of an existing application insights
        to use instead of creating a new one.
    :type application_insights: str
    :param customer_managed_key: Key vault details for encrypting data with customer-managed keys.
        If not specified, Microsoft-managed keys will be used by default.
    :type customer_managed_key: ~azure.ai.ml.entities.CustomerManagedKey
    :param image_build_compute: The name of the compute target to use for building environment
        Docker images when the container registry is behind a VNet.
    :type image_build_compute: str
    :param public_network_access: Whether to allow public endpoint connectivity
        when a workspace is private link enabled.
    :type public_network_access: str
    :param network_acls: The network access control list (ACL) settings of the workspace.
    :type network_acls: ~azure.ai.ml.entities.NetworkAcls
    :param identity: workspace's Managed Identity (user assigned, or system assigned)
    :type identity: ~azure.ai.ml.entities.IdentityConfiguration
    :param primary_user_assigned_identity: The workspace's primary user assigned identity
    :type primary_user_assigned_identity: str
    :param managed_network: workspace's Managed Network configuration
    :type managed_network: ~azure.ai.ml.entities.ManagedNetwork
    :param provision_network_now: Set to trigger the provisioning of the managed vnet with the default options when
        creating a workspace with the managed vnet enable, or else it does nothing
    :type provision_network_now: Optional[bool]
    :param system_datastores_auth_mode: The authentication mode for system datastores.
    :type system_datastores_auth_mode: str
    :param enable_data_isolation: A flag to determine if workspace has data isolation enabled.
        The flag can only be set at the creation phase, it can't be updated.
    :type enable_data_isolation: bool
    :param allow_roleassignment_on_rg: Determine whether allow workspace role assignment on resource group level.
    :type allow_roleassignment_on_rg: Optional[bool]
    :param serverless_compute: The serverless compute settings for the workspace.
    :type serverless_compute: ~azure.ai.ml.entities.ServerlessComputeSettings
    :param workspace_hub: Deprecated resource ID of an existing workspace hub to help create project workspace.
        Use the Project class instead now.
    :type workspace_hub: Optional[str]
    :param kwargs: A dictionary of additional configuration parameters.
    :type kwargs: dict

    .. literalinclude:: ../samples/ml_samples_workspace.py
            :start-after: [START workspace]
            :end-before: [END workspace]
            :language: python
            :dedent: 8
            :caption: Creating a Workspace object.
    """

    def __init__(
        self,
        *,
        name: str,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        display_name: Optional[str] = None,
        location: Optional[str] = None,
        resource_group: Optional[str] = None,
        hbi_workspace: bool = False,
        storage_account: Optional[str] = None,
        container_registry: Optional[str] = None,
        key_vault: Optional[str] = None,
        application_insights: Optional[str] = None,
        customer_managed_key: Optional[CustomerManagedKey] = None,
        image_build_compute: Optional[str] = None,
        public_network_access: Optional[str] = None,
        network_acls: Optional[NetworkAcls] = None,
        identity: Optional[IdentityConfiguration] = None,
        primary_user_assigned_identity: Optional[str] = None,
        managed_network: Optional[ManagedNetwork] = None,
        provision_network_now: Optional[bool] = None,
        system_datastores_auth_mode: Optional[str] = None,
        enable_data_isolation: bool = False,
        allow_roleassignment_on_rg: Optional[bool] = None,
        hub_id: Optional[str] = None,  # Hidden input, surfaced by Project
        workspace_hub: Optional[str] = None,  # Deprecated input maintained for backwards compat.
        serverless_compute: Optional[ServerlessComputeSettings] = None,
        **kwargs: Any,
    ):
        # Workspaces have subclasses that are differentiated by the 'kind' field in the REST API.
        # Now that this value is occasionally surfaced (for sub-class YAML specifications)
        # We've switched to using 'type' in the SDK for consistency's sake with other polymorphic classes.
        # That said, the code below quietly supports 'kind' as an input
        # to maintain backwards compatibility with internal systems that I suspect still use 'kind' somewhere.
        # 'type' takes precedence over 'kind' if they're both set, and this defaults to a normal workspace's type
        # if nothing is set.
        # pylint: disable=too-many-locals
        self._kind = kwargs.pop("kind", None)
        if self._kind is None:
            self._kind = WorkspaceKind.DEFAULT

        self.print_as_yaml = True
        self._discovery_url: Optional[str] = kwargs.pop("discovery_url", None)
        self._mlflow_tracking_uri: Optional[str] = kwargs.pop("mlflow_tracking_uri", None)
        self._workspace_id = kwargs.pop("workspace_id", None)
        self._feature_store_settings: Optional[FeatureStoreSettings] = kwargs.pop("feature_store_settings", None)
        super().__init__(name=name, description=description, tags=tags, **kwargs)

        self.display_name = display_name
        self.location = location
        self.resource_group = resource_group
        self.hbi_workspace = hbi_workspace
        self.storage_account = storage_account
        self.container_registry = container_registry
        self.key_vault = key_vault
        self.application_insights = application_insights
        self.customer_managed_key = customer_managed_key
        self.image_build_compute = image_build_compute
        self.public_network_access = public_network_access
        self.identity = identity
        self.primary_user_assigned_identity = primary_user_assigned_identity
        self.managed_network = managed_network
        self.provision_network_now = provision_network_now
        self.system_datastores_auth_mode = system_datastores_auth_mode
        self.enable_data_isolation = enable_data_isolation
        self.allow_roleassignment_on_rg = allow_roleassignment_on_rg
        # The deprecated 'workspace_hub' input is folded into 'hub_id' when the latter is unset.
        if workspace_hub and not hub_id:
            hub_id = workspace_hub
        self.__hub_id = hub_id
        # Overwrite kind if hub_id is provided. Technically not needed anymore,
        # but kept for backwards if people try to just use a normal workspace like
        # a project.
        if hub_id:
            self._kind = WorkspaceKind.PROJECT
        self.serverless_compute: Optional[ServerlessComputeSettings] = serverless_compute
        self.network_acls: Optional[NetworkAcls] = network_acls

    @property
    def discovery_url(self) -> Optional[str]:
        """Backend service base URLs for the workspace.

        :return: Backend service URLs of the workspace
        :rtype: str
        """
        return self._discovery_url

    # Exists to appease tox's mypy rules.
    @property
    def _hub_id(self) -> Optional[str]:
        """The UID of the hub parent of the project. This is an internal property
        that's surfaced by the Project sub-class, but exists here for backwards-compatibility
        reasons.

        :return: Resource ID of the parent hub.
        :rtype: str
        """
        return self.__hub_id

    # Exists to appease tox's mypy rules.
    @_hub_id.setter
    def _hub_id(self, value: str):
        """Set the hub of the project. This is an internal property
        that's surfaced by the Project sub-class, but exists here for backwards-compatibility
        reasons.


        :param value: The hub id to assign to the project.
            Note: cannot be reassigned after creation.
        :type value: str
        """
        if not value:
            return
        self.__hub_id = value

    @property
    def mlflow_tracking_uri(self) -> Optional[str]:
        """MLflow tracking uri for the workspace.

        :return: Returns mlflow tracking uri of the workspace.
        :rtype: str
        """
        return self._mlflow_tracking_uri

    def dump(self, dest: Union[str, PathLike, IO[AnyStr]], **kwargs: Any) -> None:
        """Dump the workspace spec into a file in yaml format.

        :param dest: The destination to receive this workspace's spec.
            Must be either a path to a local file, or an already-open file stream.
            If dest is a file path, a new file will be created,
            and an exception is raised if the file exists.
            If dest is an open file, the file will be written to directly,
            and an exception will be raised if the file is not writable.
        :type dest: Union[PathLike, str, IO[AnyStr]]
        """
        path = kwargs.pop("path", None)
        yaml_serialized = self._to_dict()
        dump_yaml_to_file(dest, yaml_serialized, default_flow_style=False, path=path, **kwargs)

    def _to_dict(self) -> Dict:
        res: dict = self._get_schema_class()(context={BASE_PATH_CONTEXT_KEY: "./"}).dump(self)
        return res

    @classmethod
    def _resolve_sub_cls_and_kind(
        cls, data: Dict, params_override: Optional[List[Dict]] = None
    ) -> Tuple[Type["Workspace"], str]:
        """Given a workspace data dictionary, determine the appropriate workspace class and type string.
        Allows for easier polymorphism between the workspace class and its children.
        Adapted from similar code in the Job class.

        :param data: A dictionary of values describing the workspace.
        :type data: Dict
        :param params_override: Override values from alternative sources (ex: CLI input).
        :type params_override: Optional[List[Dict]]
        :return: A tuple containing the workspace class and type string.
        :rtype: Tuple[Type["Workspace"], str]
        """
        from azure.ai.ml.entities import Hub, Project

        workspace_class: Optional[Type["Workspace"]] = None
        type_in_override = find_field_in_override(CommonYamlFields.KIND, params_override)
        type_str = type_in_override or data.get(CommonYamlFields.KIND, WorkspaceKind.DEFAULT)
        if type_str is not None:
            type_str = type_str.lower()
        if type_str == WorkspaceKind.HUB:
            workspace_class = Hub
        elif type_str == WorkspaceKind.PROJECT:
            workspace_class = Project
        elif type_str == WorkspaceKind.DEFAULT:
            workspace_class = Workspace
        else:
            msg = f"Unsupported workspace type: {type_str}."
            raise ValidationException(
                message=msg,
                no_personal_data_message=msg,
                target=ErrorTarget.WORKSPACE,
                error_category=ErrorCategory.USER_ERROR,
                error_type=ValidationErrorType.INVALID_VALUE,
            )
        return workspace_class, type_str

    @classmethod
    def _load(
        cls,
        data: Optional[Dict] = None,
        yaml_path: Optional[Union[PathLike, str]] = None,
        params_override: Optional[list] = None,
        **kwargs: Any,
    ) -> "Workspace":
        # This _load function is polymorphic and can return child classes.
        # It was adapted from the Job class's similar function.
        data = data or {}
        params_override = params_override or []
        context = {
            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
            PARAMS_OVERRIDE_KEY: params_override,
        }
        workspace_class, type_str = cls._resolve_sub_cls_and_kind(data, params_override)
        schema_type = workspace_class._get_schema_class()  # pylint: disable=protected-access
        # Fix: this message previously said "job type" — copy-paste from the Job
        # class's loader; this loader handles workspaces.
        loaded_schema = load_from_dict(
            schema_type,
            data=data,
            context=context,
            additional_message=f"If you are trying to configure a workspace that is not of type {type_str},"
            f" please specify the correct workspace type in the 'type' property.",
            **kwargs,
        )
        result = workspace_class(**loaded_schema)
        if yaml_path:
            result._source_path = yaml_path  # pylint: disable=protected-access
        return result

    @classmethod
    def _from_rest_object(
        cls, rest_obj: RestWorkspace, v2_service_context: Optional[object] = None
    ) -> Optional["Workspace"]:

        if not rest_obj:
            return None
        customer_managed_key = (
            CustomerManagedKey(
                key_vault=rest_obj.encryption.key_vault_properties.key_vault_arm_id,
                key_uri=rest_obj.encryption.key_vault_properties.key_identifier,
            )
            if rest_obj.encryption
            and rest_obj.encryption.status == WorkspaceResourceConstants.ENCRYPTION_STATUS_ENABLED
            else None
        )

        # TODO: Remove attribute check once Oct API version is out
        mlflow_tracking_uri = None

        if hasattr(rest_obj, "ml_flow_tracking_uri"):
            try:
                if v2_service_context:
                    # v2_service_context is required (not None) in get_mlflow_tracking_uri_v2
                    from azureml.mlflow import get_mlflow_tracking_uri_v2

                    mlflow_tracking_uri = get_mlflow_tracking_uri_v2(rest_obj, v2_service_context)
                else:
                    mlflow_tracking_uri = rest_obj.ml_flow_tracking_uri
            except ImportError:
                mlflow_tracking_uri = rest_obj.ml_flow_tracking_uri

        # TODO: Remove once Online Endpoints updates API version to at least 2023-08-01
        allow_roleassignment_on_rg = None
        if hasattr(rest_obj, "allow_role_assignment_on_rg"):
            allow_roleassignment_on_rg = rest_obj.allow_role_assignment_on_rg
        system_datastores_auth_mode = None
        if hasattr(rest_obj, "system_datastores_auth_mode"):
            system_datastores_auth_mode = rest_obj.system_datastores_auth_mode

        # TODO: remove this once it is included in API response
        managed_network = None
        if hasattr(rest_obj, "managed_network"):
            if rest_obj.managed_network and isinstance(rest_obj.managed_network, RestManagedNetwork):
                managed_network = ManagedNetwork._from_rest_object(  # pylint: disable=protected-access
                    rest_obj.managed_network
                )

        # TODO: Remove once it's included in response
        provision_network_now = None
        if hasattr(rest_obj, "provision_network_now"):
            provision_network_now = rest_obj.provision_network_now

        # Parse the resource group out of the ARM ID:
        # /subscriptions/<sub>/resourceGroups/<rg>/... splits so that index 4 is <rg>.
        # Fix: the guard was `< 4`, which still allowed an IndexError when the
        # split produced exactly 4 parts; index 4 requires at least 5 parts.
        armid_parts = str(rest_obj.id).split("/")
        group = None if len(armid_parts) < 5 else armid_parts[4]
        identity = None
        if rest_obj.identity and isinstance(rest_obj.identity, RestManagedServiceIdentity):
            identity = IdentityConfiguration._from_workspace_rest_object(  # pylint: disable=protected-access
                rest_obj.identity
            )
        feature_store_settings = None
        if rest_obj.feature_store_settings and isinstance(rest_obj.feature_store_settings, RestFeatureStoreSettings):
            feature_store_settings = FeatureStoreSettings._from_rest_object(  # pylint: disable=protected-access
                rest_obj.feature_store_settings
            )
        serverless_compute = None
        # TODO: Remove attribute check once serverless_compute_settings is in API response contract
        if hasattr(rest_obj, "serverless_compute_settings"):
            if rest_obj.serverless_compute_settings and isinstance(
                rest_obj.serverless_compute_settings, RestServerlessComputeSettings
            ):
                serverless_compute = ServerlessComputeSettings._from_rest_object(  # pylint: disable=protected-access
                    rest_obj.serverless_compute_settings
                )
        network_acls = None
        if hasattr(rest_obj, "network_acls"):
            if rest_obj.network_acls and isinstance(rest_obj.network_acls, RestNetworkAcls):
                network_acls = NetworkAcls._from_rest_object(rest_obj.network_acls)  # pylint: disable=protected-access

        return cls(
            name=rest_obj.name,
            id=rest_obj.id,
            description=rest_obj.description,
            kind=rest_obj.kind.lower() if rest_obj.kind else None,
            tags=rest_obj.tags,
            location=rest_obj.location,
            resource_group=group,
            display_name=rest_obj.friendly_name,
            discovery_url=rest_obj.discovery_url,
            hbi_workspace=rest_obj.hbi_workspace,
            storage_account=rest_obj.storage_account,
            container_registry=rest_obj.container_registry,
            key_vault=rest_obj.key_vault,
            application_insights=rest_obj.application_insights,
            customer_managed_key=customer_managed_key,
            image_build_compute=rest_obj.image_build_compute,
            public_network_access=rest_obj.public_network_access,
            network_acls=network_acls,
            mlflow_tracking_uri=mlflow_tracking_uri,
            identity=identity,
            primary_user_assigned_identity=rest_obj.primary_user_assigned_identity,
            managed_network=managed_network,
            provision_network_now=provision_network_now,
            system_datastores_auth_mode=system_datastores_auth_mode,
            feature_store_settings=feature_store_settings,
            enable_data_isolation=rest_obj.enable_data_isolation,
            allow_roleassignment_on_rg=allow_roleassignment_on_rg,
            hub_id=rest_obj.hub_resource_id,
            workspace_id=rest_obj.workspace_id,
            serverless_compute=serverless_compute,
        )

    def _to_rest_object(self) -> RestWorkspace:
        """Note: Unlike most entities, the create operation for workspaces does NOT use this function,
        and instead relies on its own internal conversion process to produce a valid ARM template.

        :return: The REST API object-equivalent of this workspace.
        :rtype: RestWorkspace
        """
        feature_store_settings = None
        if self._feature_store_settings:
            feature_store_settings = self._feature_store_settings._to_rest_object()  # pylint: disable=protected-access

        serverless_compute_settings = None
        if self.serverless_compute:
            serverless_compute_settings = self.serverless_compute._to_rest_object()  # pylint: disable=protected-access

        return RestWorkspace(
            name=self.name,
            identity=(
                self.identity._to_workspace_rest_object() if self.identity else None  # pylint: disable=protected-access
            ),
            location=self.location,
            tags=self.tags,
            description=self.description,
            kind=self._kind,
            friendly_name=self.display_name,
            key_vault=self.key_vault,
            application_insights=self.application_insights,
            container_registry=self.container_registry,
            storage_account=self.storage_account,
            discovery_url=self.discovery_url,
            hbi_workspace=self.hbi_workspace,
            image_build_compute=self.image_build_compute,
            public_network_access=self.public_network_access,
            primary_user_assigned_identity=self.primary_user_assigned_identity,
            managed_network=(
                self.managed_network._to_rest_object()  # pylint: disable=protected-access
                if self.managed_network
                else None
            ),
            provision_network_now=self.provision_network_now,
            system_datastores_auth_mode=self.system_datastores_auth_mode,
            feature_store_settings=feature_store_settings,
            enable_data_isolation=self.enable_data_isolation,
            allow_role_assignment_on_rg=self.allow_roleassignment_on_rg,  # diff due to swagger restclient casing diff
            hub_resource_id=self._hub_id,
            serverless_compute_settings=serverless_compute_settings,
        )

    # Helper for sub-class polymorphism. Needs to be overwritten by child classes
    # If they don't want to redefine things like _to_dict.
    @classmethod
    def _get_schema_class(cls) -> Type[WorkspaceSchema]:
        return WorkspaceSchema
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/workspace_keys.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/workspace_keys.py
new file mode 100644
index 00000000..4213b419
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_workspace/workspace_keys.py
@@ -0,0 +1,100 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from typing import List, Optional
+
+from azure.ai.ml._restclient.v2024_10_01_preview.models import ListWorkspaceKeysResult
+
+
class ContainerRegistryCredential:
    """Credentials for the Azure Container Registry (ACR) tied to a workspace.

    :param location: Azure region in which the ACR lives.
    :type location: str
    :param username: Login name for the registry.
    :type username: str
    :param passwords: One or more passwords granting access to the registry.
    :type passwords: List[str]
    """

    def __init__(
        self,
        *,
        location: Optional[str] = None,
        username: Optional[str] = None,
        passwords: Optional[List[str]] = None,
    ):
        # Plain keyword-only data holder; values are stored exactly as given.
        self.location = location
        self.username = username
        self.passwords = passwords
+
+
class NotebookAccessKeys:
    """Access keys for the notebook resource attached to a workspace.

    :param primary_access_key: Primary access key of the notebook resource.
    :type primary_access_key: str
    :param secondary_access_key: Secondary access key of the notebook resource.
    :type secondary_access_key: str
    """

    def __init__(
        self,
        *,
        primary_access_key: Optional[str] = None,
        secondary_access_key: Optional[str] = None,
    ):
        # Simple keyword-only container for the two key values.
        self.primary_access_key = primary_access_key
        self.secondary_access_key = secondary_access_key
+
+
class WorkspaceKeys:
    """Workspace Keys.

    :param user_storage_key: Key for the storage account associated with the given workspace.
    :type user_storage_key: str
    :param user_storage_resource_id: Resource id of the storage account associated with the given workspace.
    :type user_storage_resource_id: str
    :param app_insights_instrumentation_key: Key for Application Insights associated with the given workspace.
    :type app_insights_instrumentation_key: str
    :param container_registry_credentials: Key for the ACR associated with the given workspace.
    :type container_registry_credentials: ContainerRegistryCredential
    :param notebook_access_keys: Key for the notebook resource associated with the given workspace.
    :type notebook_access_keys: NotebookAccessKeys
    """

    def __init__(
        self,
        *,
        user_storage_key: Optional[str] = None,
        user_storage_resource_id: Optional[str] = None,
        app_insights_instrumentation_key: Optional[str] = None,
        container_registry_credentials: Optional["ContainerRegistryCredential"] = None,
        notebook_access_keys: Optional["NotebookAccessKeys"] = None,
    ):
        self.user_storage_key = user_storage_key
        self.user_storage_resource_id = user_storage_resource_id
        self.app_insights_instrumentation_key = app_insights_instrumentation_key
        self.container_registry_credentials = container_registry_credentials
        self.notebook_access_keys = notebook_access_keys

    @classmethod
    def _from_rest_object(cls, rest_obj: "ListWorkspaceKeysResult") -> Optional["WorkspaceKeys"]:
        """Build a :class:`WorkspaceKeys` entity from the autorest list-keys result.

        :param rest_obj: REST response object from the list-keys operation; may be None.
        :type rest_obj: ListWorkspaceKeysResult
        :return: The populated entity, or None when rest_obj is falsy.
        :rtype: Optional[WorkspaceKeys]
        """
        if not rest_obj:
            return None

        # getattr with a None default covers both a missing attribute and an
        # attribute explicitly set to None (equivalent to the hasattr + is-not-None pair).
        rest_acr = getattr(rest_obj, "container_registry_credentials", None)
        container_registry_credentials = None
        if rest_acr is not None:
            container_registry_credentials = ContainerRegistryCredential(
                location=rest_acr.location,
                username=rest_acr.username,
                passwords=rest_acr.passwords,
            )

        rest_notebook = getattr(rest_obj, "notebook_access_keys", None)
        notebook_access_keys = None
        if rest_notebook is not None:
            notebook_access_keys = NotebookAccessKeys(
                primary_access_key=rest_notebook.primary_access_key,
                secondary_access_key=rest_notebook.secondary_access_key,
            )

        # Use cls(...) rather than WorkspaceKeys(...) so that subclasses calling
        # this alternate constructor receive instances of their own type.
        return cls(
            user_storage_key=rest_obj.user_storage_key,
            # REST model names this field user_storage_arm_id; entity exposes it
            # as user_storage_resource_id.
            user_storage_resource_id=rest_obj.user_storage_arm_id,
            app_insights_instrumentation_key=rest_obj.app_insights_instrumentation_key,
            container_registry_credentials=container_registry_credentials,
            notebook_access_keys=notebook_access_keys,
        )