about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane
diff options
context:
space:
mode:
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane')
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/__init__.py18
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_azure_machine_learning_workspaces.py117
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_configuration.py64
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_patch.py31
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_vendor.py27
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_version.py9
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/__init__.py15
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_azure_machine_learning_workspaces.py111
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_configuration.py60
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_patch.py31
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/__init__.py27
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_call_operations.py243
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_container_operations.py321
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_version_operations.py804
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_dataset_controller_v2_operations.py845
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_dataset_v2_operations.py592
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_datasets_v1_operations.py845
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_delete_operations.py104
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_get_operation_status_operations.py170
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/__init__.py187
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_azure_machine_learning_workspaces_enums.py118
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_models.py2608
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_models_py3.py2916
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/__init__.py27
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_call_operations.py356
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_container_operations.py464
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_version_operations.py1211
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_dataset_controller_v2_operations.py1300
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_dataset_v2_operations.py905
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_datasets_v1_operations.py1300
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_delete_operations.py145
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_get_operation_status_operations.py212
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/py.typed1
33 files changed, 16184 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/__init__.py
new file mode 100644
index 00000000..da466144
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/__init__.py
@@ -0,0 +1,18 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+from ._version import VERSION
+
+__version__ = VERSION
+__all__ = ['AzureMachineLearningWorkspaces']
+
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_azure_machine_learning_workspaces.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000..faa760c5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_azure_machine_learning_workspaces.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import TYPE_CHECKING
+
+from azure.mgmt.core import ARMPipelineClient
+from msrest import Deserializer, Serializer
+
+from . import models
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import DataCallOperations, DataContainerOperations, DataVersionOperations, DatasetControllerV2Operations, DatasetV2Operations, DatasetsV1Operations, DeleteOperations, GetOperationStatusOperations
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Optional
+
+    from azure.core.credentials import TokenCredential
+    from azure.core.rest import HttpRequest, HttpResponse
+
+class AzureMachineLearningWorkspaces(object):
+    """AzureMachineLearningWorkspaces.
+
+    :ivar data_call: DataCallOperations operations
+    :vartype data_call: azure.mgmt.machinelearningservices.operations.DataCallOperations
+    :ivar data_container: DataContainerOperations operations
+    :vartype data_container: azure.mgmt.machinelearningservices.operations.DataContainerOperations
+    :ivar delete: DeleteOperations operations
+    :vartype delete: azure.mgmt.machinelearningservices.operations.DeleteOperations
+    :ivar datasets_v1: DatasetsV1Operations operations
+    :vartype datasets_v1: azure.mgmt.machinelearningservices.operations.DatasetsV1Operations
+    :ivar dataset_controller_v2: DatasetControllerV2Operations operations
+    :vartype dataset_controller_v2:
+     azure.mgmt.machinelearningservices.operations.DatasetControllerV2Operations
+    :ivar dataset_v2: DatasetV2Operations operations
+    :vartype dataset_v2: azure.mgmt.machinelearningservices.operations.DatasetV2Operations
+    :ivar data_version: DataVersionOperations operations
+    :vartype data_version: azure.mgmt.machinelearningservices.operations.DataVersionOperations
+    :ivar get_operation_status: GetOperationStatusOperations operations
+    :vartype get_operation_status:
+     azure.mgmt.machinelearningservices.operations.GetOperationStatusOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials.TokenCredential
+    :param base_url: Service URL. Default value is ''.
+    :type base_url: str
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+     Retry-After header is present.
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        base_url="",  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, **kwargs)
+        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+        self._serialize.client_side_validation = False
+        self.data_call = DataCallOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.data_container = DataContainerOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.delete = DeleteOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.datasets_v1 = DatasetsV1Operations(self._client, self._config, self._serialize, self._deserialize)
+        self.dataset_controller_v2 = DatasetControllerV2Operations(self._client, self._config, self._serialize, self._deserialize)
+        self.dataset_v2 = DatasetV2Operations(self._client, self._config, self._serialize, self._deserialize)
+        self.data_version = DataVersionOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.get_operation_status = GetOperationStatusOperations(self._client, self._config, self._serialize, self._deserialize)
+
+
+    def _send_request(
+        self,
+        request,  # type: HttpRequest
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> HttpResponse
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = client._send_request(request)
+        <HttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.HttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, **kwargs)
+
+    def close(self):
+        # type: () -> None
+        self._client.close()
+
+    def __enter__(self):
+        # type: () -> AzureMachineLearningWorkspaces
+        self._client.__enter__()
+        return self
+
+    def __exit__(self, *exc_details):
+        # type: (Any) -> None
+        self._client.__exit__(*exc_details)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_configuration.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_configuration.py
new file mode 100644
index 00000000..2ec7eb9e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_configuration.py
@@ -0,0 +1,64 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
+
+from ._version import VERSION
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any
+
+    from azure.core.credentials import TokenCredential
+
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+    """Configuration for AzureMachineLearningWorkspaces.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials.TokenCredential
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+
+        self.credential = credential
+        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+        kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_patch.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_patch.py
new file mode 100644
index 00000000..74e48ecd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_patch.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# This file is used for handwritten extensions to the generated code. Example:
+# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+def patch_sdk():
+    pass
\ No newline at end of file
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_vendor.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_vendor.py
new file mode 100644
index 00000000..138f663c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_vendor.py
@@ -0,0 +1,27 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.pipeline.transport import HttpRequest
+
+def _convert_request(request, files=None):
+    data = request.content if not files else None
+    request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
+    if files:
+        request.set_formdata_body(files)
+    return request
+
+def _format_url_section(template, **kwargs):
+    components = template.split("/")
+    while components:
+        try:
+            return template.format(**kwargs)
+        except KeyError as key:
+            formatted_components = template.split("/")
+            components = [
+                c for c in formatted_components if "{}".format(key.args[0]) not in c
+            ]
+            template = "/".join(components)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_version.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_version.py
new file mode 100644
index 00000000..eae7c95b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/_version.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+VERSION = "0.1.0"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/__init__.py
new file mode 100644
index 00000000..f67ccda9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/__init__.py
@@ -0,0 +1,15 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
+
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_azure_machine_learning_workspaces.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000..597cca3d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,111 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable, Optional, TYPE_CHECKING
+
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core import AsyncARMPipelineClient
+from msrest import Deserializer, Serializer
+
+from .. import models
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import DataCallOperations, DataContainerOperations, DataVersionOperations, DatasetControllerV2Operations, DatasetV2Operations, DatasetsV1Operations, DeleteOperations, GetOperationStatusOperations
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+class AzureMachineLearningWorkspaces:
+    """AzureMachineLearningWorkspaces.
+
+    :ivar data_call: DataCallOperations operations
+    :vartype data_call: azure.mgmt.machinelearningservices.aio.operations.DataCallOperations
+    :ivar data_container: DataContainerOperations operations
+    :vartype data_container:
+     azure.mgmt.machinelearningservices.aio.operations.DataContainerOperations
+    :ivar delete: DeleteOperations operations
+    :vartype delete: azure.mgmt.machinelearningservices.aio.operations.DeleteOperations
+    :ivar datasets_v1: DatasetsV1Operations operations
+    :vartype datasets_v1: azure.mgmt.machinelearningservices.aio.operations.DatasetsV1Operations
+    :ivar dataset_controller_v2: DatasetControllerV2Operations operations
+    :vartype dataset_controller_v2:
+     azure.mgmt.machinelearningservices.aio.operations.DatasetControllerV2Operations
+    :ivar dataset_v2: DatasetV2Operations operations
+    :vartype dataset_v2: azure.mgmt.machinelearningservices.aio.operations.DatasetV2Operations
+    :ivar data_version: DataVersionOperations operations
+    :vartype data_version: azure.mgmt.machinelearningservices.aio.operations.DataVersionOperations
+    :ivar get_operation_status: GetOperationStatusOperations operations
+    :vartype get_operation_status:
+     azure.mgmt.machinelearningservices.aio.operations.GetOperationStatusOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :param base_url: Service URL. Default value is ''.
+    :type base_url: str
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+     Retry-After header is present.
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        base_url: str = "",
+        **kwargs: Any
+    ) -> None:
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, **kwargs)
+        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+        self._serialize.client_side_validation = False
+        self.data_call = DataCallOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.data_container = DataContainerOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.delete = DeleteOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.datasets_v1 = DatasetsV1Operations(self._client, self._config, self._serialize, self._deserialize)
+        self.dataset_controller_v2 = DatasetControllerV2Operations(self._client, self._config, self._serialize, self._deserialize)
+        self.dataset_v2 = DatasetV2Operations(self._client, self._config, self._serialize, self._deserialize)
+        self.data_version = DataVersionOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.get_operation_status = GetOperationStatusOperations(self._client, self._config, self._serialize, self._deserialize)
+
+
+    def _send_request(
+        self,
+        request: HttpRequest,
+        **kwargs: Any
+    ) -> Awaitable[AsyncHttpResponse]:
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = await client._send_request(request)
+        <AsyncHttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.AsyncHttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, **kwargs)
+
+    async def close(self) -> None:
+        await self._client.close()
+
+    async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details) -> None:
+        await self._client.__aexit__(*exc_details)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_configuration.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_configuration.py
new file mode 100644
index 00000000..26def54e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_configuration.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
+
+from .._version import VERSION
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+    """Configuration for AzureMachineLearningWorkspaces.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        **kwargs: Any
+    ) -> None:
+        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+
+        self.credential = credential
+        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+        kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs: Any
+    ) -> None:
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_patch.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_patch.py
new file mode 100644
index 00000000..74e48ecd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/_patch.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# This file is used for handwritten extensions to the generated code. Example:
+# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+def patch_sdk():
+    pass
\ No newline at end of file
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/__init__.py
new file mode 100644
index 00000000..f0340813
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/__init__.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._data_call_operations import DataCallOperations
+from ._data_container_operations import DataContainerOperations
+from ._delete_operations import DeleteOperations
+from ._datasets_v1_operations import DatasetsV1Operations
+from ._dataset_controller_v2_operations import DatasetControllerV2Operations
+from ._dataset_v2_operations import DatasetV2Operations
+from ._data_version_operations import DataVersionOperations
+from ._get_operation_status_operations import GetOperationStatusOperations
+
+__all__ = [
+    'DataCallOperations',
+    'DataContainerOperations',
+    'DeleteOperations',
+    'DatasetsV1Operations',
+    'DatasetControllerV2Operations',
+    'DatasetV2Operations',
+    'DataVersionOperations',
+    'GetOperationStatusOperations',
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_call_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_call_operations.py
new file mode 100644
index 00000000..cf00280c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_call_operations.py
@@ -0,0 +1,243 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._data_call_operations import build_get_preview_for_ml_table_request, build_get_quick_profile_for_ml_table_request, build_get_schema_for_ml_table_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DataCallOperations:
+    """DataCallOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def get_schema_for_ml_table(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.DataCallRequest"] = None,
+        **kwargs: Any
+    ) -> List["_models.ColumnDefinition"]:
+        """Get schema for a specific MLTable.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataCallRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: list of ColumnDefinition, or the result of cls(response)
+        :rtype: list[~azure.mgmt.machinelearningservices.models.ColumnDefinition]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[List["_models.ColumnDefinition"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DataCallRequest')
+        else:
+            _json = None
+
+        request = build_get_schema_for_ml_table_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_schema_for_ml_table.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('[ColumnDefinition]', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_schema_for_ml_table.metadata = {'url': '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/schema'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_preview_for_ml_table(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.DataCallRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.DataViewSetResult":
+        """Get preview for a specific MLTable.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataCallRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataViewSetResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataViewSetResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataViewSetResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DataCallRequest')
+        else:
+            _json = None
+
+        request = build_get_preview_for_ml_table_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_preview_for_ml_table.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataViewSetResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_preview_for_ml_table.metadata = {'url': '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/preview'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_quick_profile_for_ml_table(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.DataCallRequest"] = None,
+        **kwargs: Any
+    ) -> List["_models.ProfileResult"]:
+        """Get quick profile for a specific MLTable.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataCallRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: list of ProfileResult, or the result of cls(response)
+        :rtype: list[~azure.mgmt.machinelearningservices.models.ProfileResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[List["_models.ProfileResult"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DataCallRequest')
+        else:
+            _json = None
+
+        request = build_get_quick_profile_for_ml_table_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_quick_profile_for_ml_table.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('[ProfileResult]', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_quick_profile_for_ml_table.metadata = {'url': '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/quickprofile'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_container_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_container_operations.py
new file mode 100644
index 00000000..c4c78abb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_container_operations.py
@@ -0,0 +1,321 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._data_container_operations import build_create_data_container_request, build_get_data_container_request, build_list_data_container_request, build_modify_data_container_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DataContainerOperations:
+    """DataContainerOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def create_data_container(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.DataContainer"] = None,
+        **kwargs: Any
+    ) -> "_models.DataContainerEntity":
+        """create_data_container.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataContainer
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DataContainer')
+        else:
+            _json = None
+
+        request = build_create_data_container_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.create_data_container.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create_data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer'}  # type: ignore
+
+
+    @distributed_trace
+    def list_data_container(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedDataContainerEntityList"]:
+        """list_data_container.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDataContainerEntityList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDataContainerEntityList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDataContainerEntityList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_data_container_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    template_url=self.list_data_container.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_data_container_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedDataContainerEntityList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer'}  # type: ignore
+
+    @distributed_trace_async
+    async def get_data_container(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        **kwargs: Any
+    ) -> "_models.DataContainerEntity":
+        """get_data_container.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_data_container_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            template_url=self.get_data_container.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def modify_data_container(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        body: Optional["_models.DataContainerMutable"] = None,
+        **kwargs: Any
+    ) -> "_models.DataContainerEntity":
+        """modify_data_container.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataContainerMutable
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DataContainerMutable')
+        else:
+            _json = None
+
+        request = build_modify_data_container_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.modify_data_container.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    modify_data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_version_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_version_operations.py
new file mode 100644
index 00000000..9f375aeb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_data_version_operations.py
@@ -0,0 +1,804 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._data_version_operations import build_batch_get_resolved_uris_request, build_create_request, build_create_unregistered_input_data_request, build_create_unregistered_output_data_request, build_delete_request, build_exists_request, build_get_by_asset_id_request, build_get_request, build_list_request, build_modify_request, build_registered_existing_data_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DataVersionOperations:
+    """DataVersionOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Wire up the shared pipeline client and (de)serializers; instances
+        # are created by the service client, never constructed by users.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def create(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        body: Optional["_models.DataVersion"] = None,
+        **kwargs: Any
+    ) -> "_models.DataVersionEntity":
+        """create.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataVersion
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataVersionEntity"]
+        # Map common auth/not-found/conflict codes to azure-core exception
+        # types; callers can extend or override via an 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Body is optional: serialize when provided, otherwise send no payload.
+        if body is not None:
+            _json = self._serialize.body(body, 'DataVersion')
+        else:
+            _json = None
+
+        request = build_create_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.create.metadata['url'],
+        )
+        # Convert the azure.core.rest request to the pipeline-transport form
+        # and expand the URL template placeholders against the client base.
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # failsafe_deserialize yields None (instead of raising) when the
+            # error payload does not match the ErrorResponse schema.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataVersionEntity', pipeline_response)
+
+        # 'cls' lets the caller post-process the raw pipeline response.
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions'}  # type: ignore
+
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedDataVersionEntityList"]:
+        """list.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param order_by:
+        :type order_by: str
+        :param top:
+        :type top: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDataVersionEntityList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDataVersionEntityList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDataVersionEntityList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        # Build the first-page request, or re-target the same request shape at
+        # the server-provided continuation link for subsequent pages.
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    name=name,
+                    order_by=order_by,
+                    top=top,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    name=name,
+                    order_by=order_by,
+                    top=top,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                # Continuation requests are always issued as GET, regardless
+                # of the verb the request builder defaults to.
+                request.method = "GET"
+            return request
+
+        # Pull the element list and continuation token out of one page.
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedDataVersionEntityList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        # Fetch a single page, translating HTTP errors as in the non-paged ops.
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        # AsyncItemPaged drives get_next/extract_data lazily as the caller
+        # iterates, so no request is sent until iteration begins.
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions'}  # type: ignore
+
+    @distributed_trace_async
+    async def get(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        version: str,
+        **kwargs: Any
+    ) -> "_models.DataVersionEntity":
+        """get.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param version:
+        :type version: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataVersionEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        # Read-only operation: no request body, just the templated URL.
+        request = build_get_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            version=version,
+            template_url=self.get.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataVersionEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def modify(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        version: str,
+        body: Optional["_models.DataVersionMutable"] = None,
+        **kwargs: Any
+    ) -> "_models.DataVersionEntity":
+        """modify.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param version:
+        :type version: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataVersionMutable
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataVersionEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Only the mutable subset of the data version is sent; omitted body
+        # means an empty update request.
+        if body is not None:
+            _json = self._serialize.body(body, 'DataVersionMutable')
+        else:
+            _json = None
+
+        request = build_modify_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            version=version,
+            content_type=content_type,
+            json=_json,
+            template_url=self.modify.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Server echoes back the full, updated entity.
+        deserialized = self._deserialize('DataVersionEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    modify.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        version: str,
+        **kwargs: Any
+    ) -> "_models.HttpResponseMessage":
+        """delete.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param version:
+        :type version: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HttpResponseMessage, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.HttpResponseMessage
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HttpResponseMessage"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            version=version,
+            template_url=self.delete.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Unusually for a delete, this API returns a message body describing
+        # the HTTP response rather than an empty 204.
+        deserialized = self._deserialize('HttpResponseMessage', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def exists(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        version: str,
+        **kwargs: Any
+    ) -> bool:
+        """exists.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param version:
+        :type version: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: bool, or the result of cls(response)
+        :rtype: bool
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[bool]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_exists_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            version=version,
+            template_url=self.exists.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # The service answers with a bare JSON boolean rather than an entity.
+        deserialized = self._deserialize('bool', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    exists.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}/exists'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_asset_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.AssetId"] = None,
+        **kwargs: Any
+    ) -> "_models.DataVersionEntity":
+        """get_by_asset_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.AssetId
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataVersionEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataVersionEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Lookup key (the asset id) travels in the request body, not the URL.
+        if body is not None:
+            _json = self._serialize.body(body, 'AssetId')
+        else:
+            _json = None
+
+        request = build_get_by_asset_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_by_asset_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataVersionEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_asset_id.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/getByAssetId'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def create_unregistered_input_data(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.CreateUnregisteredInputData"] = None,
+        **kwargs: Any
+    ) -> "_models.DataContainerEntity":
+        """create_unregistered_input_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateUnregisteredInputData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateUnregisteredInputData')
+        else:
+            _json = None
+
+        request = build_create_unregistered_input_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.create_unregistered_input_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # NOTE: despite living on DataVersionOperations, this endpoint returns
+        # a DataContainerEntity, per the generated response mapping below.
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create_unregistered_input_data.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/createUnregisteredInput'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def create_unregistered_output_data(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.CreateUnregisteredOutputData"] = None,
+        **kwargs: Any
+    ) -> "_models.DataContainerEntity":
+        """create_unregistered_output_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateUnregisteredOutputData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateUnregisteredOutputData')
+        else:
+            _json = None
+
+        request = build_create_unregistered_output_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.create_unregistered_output_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Mirrors create_unregistered_input_data, but hits the
+        # createUnregisteredOutput endpoint.
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create_unregistered_output_data.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/createUnregisteredOutput'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def registered_existing_data(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.RegisterExistingData"] = None,
+        **kwargs: Any
+    ) -> "_models.DataContainerEntity":
+        """registered_existing_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.RegisterExistingData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'RegisterExistingData')
+        else:
+            _json = None
+
+        request = build_registered_existing_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.registered_existing_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Registers already-uploaded data as an asset; the server replies with
+        # the resulting container entity.
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    registered_existing_data.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/registerExisting'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_get_resolved_uris(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.BatchGetResolvedURIs"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchDataUriResponse":
+        """batch_get_resolved_uris.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchGetResolvedURIs
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchDataUriResponse, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchDataUriResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchDataUriResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchGetResolvedURIs')
+        else:
+            _json = None
+
+        request = build_batch_get_resolved_uris_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_get_resolved_uris.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchDataUriResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_get_resolved_uris.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/batchGetResolvedUris'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_dataset_controller_v2_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_dataset_controller_v2_operations.py
new file mode 100644
index 00000000..a3574be3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_dataset_controller_v2_operations.py
@@ -0,0 +1,845 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._dataset_controller_v2_operations import build_delete_all_datasets_request, build_get_all_dataset_definitions_request, build_get_all_dataset_versions_request, build_get_dataset_by_name_request, build_get_dataset_definition_request, build_list_request, build_register_request, build_unregister_dataset_request, build_update_dataset_request, build_update_definition_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DatasetControllerV2Operations:
+    """DatasetControllerV2Operations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def get_dataset_definition(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        version: str,
+        **kwargs: Any
+    ) -> "_models.DatasetDefinition":
+        """Get a specific dataset definition.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param version:
+        :type version: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetDefinition, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetDefinition"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_dataset_definition_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            version=version,
+            template_url=self.get_dataset_definition.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetDefinition', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_definition.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions/{version}'}  # type: ignore
+
+
+    @distributed_trace
+    def get_all_dataset_definitions(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        page_size: Optional[int] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedDatasetDefinitionList"]:
+        """Get all dataset definitions for a given dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetDefinitionList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_all_dataset_definitions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.get_all_dataset_definitions.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_all_dataset_definitions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedDatasetDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_all_dataset_definitions.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions'}  # type: ignore
+
+    @distributed_trace_async
+    async def update_definition(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        register_as_pending: Optional[bool] = False,
+        force_update: Optional[bool] = False,
+        dataset_type: Optional[str] = None,
+        user_version_id: Optional[str] = None,
+        body: Optional["_models.DatasetDefinition"] = None,
+        **kwargs: Any
+    ) -> "_models.Dataset":
+        """Update a dataset definition.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param register_as_pending:
+        :type register_as_pending: bool
+        :param force_update:
+        :type force_update: bool
+        :param dataset_type:
+        :type dataset_type: str
+        :param user_version_id:
+        :type user_version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetDefinition')
+        else:
+            _json = None
+
+        request = build_update_definition_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            register_as_pending=register_as_pending,
+            force_update=force_update,
+            dataset_type=dataset_type,
+            user_version_id=user_version_id,
+            template_url=self.update_definition.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_definition.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions'}  # type: ignore
+
+
+    @distributed_trace
+    def get_all_dataset_versions(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        page_size: Optional[int] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedStringList"]:
+        """Get all dataset versions for a given dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedStringList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedStringList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedStringList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_all_dataset_versions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.get_all_dataset_versions.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_all_dataset_versions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedStringList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_all_dataset_versions.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/versions'}  # type: ignore
+
+    @distributed_trace_async
+    async def get_dataset_by_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_name: str,
+        version_id: Optional[str] = None,
+        include_latest_definition: Optional[bool] = True,
+        **kwargs: Any
+    ) -> "_models.Dataset":
+        """Get a dataset for a given dataset name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_name:
+        :type dataset_name: str
+        :param version_id:
+        :type version_id: str
+        :param include_latest_definition:
+        :type include_latest_definition: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_dataset_by_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_name=dataset_name,
+            version_id=version_id,
+            include_latest_definition=include_latest_definition,
+            template_url=self.get_dataset_by_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_by_name.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}'}  # type: ignore
+
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_names: Optional[List[str]] = None,
+        search_text: Optional[str] = None,
+        include_invisible: Optional[bool] = False,
+        status: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        page_size: Optional[int] = None,
+        include_latest_definition: Optional[bool] = False,
+        order_by: Optional[str] = None,
+        order_by_asc: Optional[bool] = False,
+        dataset_types: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedDatasetList"]:
+        """Get a list of datasets.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_names:
+        :type dataset_names: list[str]
+        :param search_text:
+        :type search_text: str
+        :param include_invisible:
+        :type include_invisible: bool
+        :param status:
+        :type status: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :param include_latest_definition:
+        :type include_latest_definition: bool
+        :param order_by:
+        :type order_by: str
+        :param order_by_asc:
+        :type order_by_asc: bool
+        :param dataset_types:
+        :type dataset_types: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_names=dataset_names,
+                    search_text=search_text,
+                    include_invisible=include_invisible,
+                    status=status,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    include_latest_definition=include_latest_definition,
+                    order_by=order_by,
+                    order_by_asc=order_by_asc,
+                    dataset_types=dataset_types,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_names=dataset_names,
+                    search_text=search_text,
+                    include_invisible=include_invisible,
+                    status=status,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    include_latest_definition=include_latest_definition,
+                    order_by=order_by,
+                    order_by_asc=order_by_asc,
+                    dataset_types=dataset_types,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedDatasetList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+    @distributed_trace_async
+    async def register(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        register_as_pending: Optional[bool] = False,
+        if_exists_ok: Optional[bool] = True,
+        update_definition_if_exists: Optional[bool] = False,
+        with_data_hash: Optional[bool] = False,
+        user_version_id: Optional[str] = None,
+        body: Optional["_models.Dataset"] = None,
+        **kwargs: Any
+    ) -> "_models.Dataset":
+        """Register new dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param register_as_pending:
+        :type register_as_pending: bool
+        :param if_exists_ok:
+        :type if_exists_ok: bool
+        :param update_definition_if_exists:
+        :type update_definition_if_exists: bool
+        :param with_data_hash:
+        :type with_data_hash: bool
+        :param user_version_id:
+        :type user_version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Dataset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # 'cls' is the AutoRest hook that lets callers post-process the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the status-code -> exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # The request body is optional; omit the JSON payload entirely when absent.
+        if body is not None:
+            _json = self._serialize.body(body, 'Dataset')
+        else:
+            _json = None
+
+        request = build_register_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            register_as_pending=register_as_pending,
+            if_exists_ok=if_exists_ok,
+            update_definition_if_exists=update_definition_if_exists,
+            with_data_hash=with_data_hash,
+            user_version_id=user_version_id,
+            template_url=self.register.metadata['url'],
+        )
+        # Adapt the azure.core.rest request to the pipeline transport type, then
+        # substitute the URL path parameters.
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only documented success status; everything else is raised.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    register.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_all_datasets(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> None:
+        """Unregister all datasets in the workspace.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_all_datasets_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.delete_all_datasets.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # NOTE(review): the success-status list below is empty, so *every* response
+        # (2xx included) takes the error path and raises HttpResponseError. This looks
+        # like a code-generation artifact — confirm against the service swagger before
+        # relying on this operation.
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete_all_datasets.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def update_dataset(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        force_update: Optional[bool] = False,
+        body: Optional["_models.Dataset"] = None,
+        **kwargs: Any
+    ) -> "_models.Dataset":
+        """Update a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param force_update:
+        :type force_update: bool
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Dataset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # The body is optional; send no JSON payload when it is omitted.
+        if body is not None:
+            _json = self._serialize.body(body, 'Dataset')
+        else:
+            _json = None
+
+        request = build_update_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            force_update=force_update,
+            template_url=self.update_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only documented success status; everything else is raised.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_dataset.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def unregister_dataset(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        **kwargs: Any
+    ) -> None:
+        """Unregister a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_unregister_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            template_url=self.unregister_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # NOTE(review): empty success-status list — every response, success or not,
+        # raises HttpResponseError here. Likely a generator artifact; confirm against
+        # the service swagger.
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    unregister_dataset.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_dataset_v2_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_dataset_v2_operations.py
new file mode 100644
index 00000000..584122c1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_dataset_v2_operations.py
@@ -0,0 +1,592 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._dataset_v2_operations import build_create_request, build_delete_all_datasets_request, build_delete_dataset_by_name_request, build_get_dataset_by_id_request, build_get_dataset_by_name_request, build_list_request, build_update_dataset_by_name_and_version_request, build_update_dataset_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DatasetV2Operations:
+    """DatasetV2Operations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Wires up the shared pipeline client and (de)serializers supplied by the
+        # generated service client; this class is not meant to be constructed directly.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def create(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        if_exists_update: Optional[bool] = False,
+        body: Optional["_models.DatasetV2"] = None,
+        **kwargs: Any
+    ) -> "_models.DatasetV2":
+        """Create new dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param if_exists_update:
+        :type if_exists_update: bool
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # 'cls' is the AutoRest hook that lets callers post-process the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # The body is optional; send no JSON payload when it is omitted.
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetV2')
+        else:
+            _json = None
+
+        request = build_create_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            if_exists_update=if_exists_update,
+            template_url=self.create.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only documented success status; everything else is raised.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_all_datasets(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> None:
+        """Delete all datasets in the workspace.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_all_datasets_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.delete_all_datasets.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # NOTE(review): empty success-status list — every response, success or not,
+        # raises HttpResponseError here. Likely a generator artifact; confirm against
+        # the service swagger.
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete_all_datasets.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        names: Optional[List[str]] = None,
+        search_text: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        page_size: Optional[int] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedDatasetV2List"]:
+        """Get a list of datasets.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param names:
+        :type names: list[str]
+        :param search_text:
+        :type search_text: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetV2List or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetV2List]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetV2List"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        # Build the first-page request from the operation's template URL, or re-issue
+        # against the service-provided next_link for follow-up pages. Note the query
+        # parameters are passed again on next_link requests and the method is forced
+        # back to GET.
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    names=names,
+                    search_text=search_text,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    names=names,
+                    search_text=search_text,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        # Deserialize one page and return (continuation token, async iterable of items).
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedDatasetV2List", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        # Fetch a single page, mapping non-200 statuses to typed errors.
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        # AsyncItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+    @distributed_trace_async
+    async def delete_dataset_by_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        version_id: str,
+        **kwargs: Any
+    ) -> None:
+        """Delete a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param version_id:
+        :type version_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_dataset_by_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            version_id=version_id,
+            template_url=self.delete_dataset_by_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # NOTE(review): empty success-status list — every response, success or not,
+        # raises HttpResponseError here. Likely a generator artifact; confirm against
+        # the service swagger.
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete_dataset_by_name.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}/versions/{versionId}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def update_dataset_by_name_and_version(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        version_id: str,
+        body: Optional["_models.DatasetV2"] = None,
+        **kwargs: Any
+    ) -> "_models.DatasetV2":
+        """Update a dataset by its name and version.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param version_id:
+        :type version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # The body is optional; send no JSON payload when it is omitted.
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetV2')
+        else:
+            _json = None
+
+        request = build_update_dataset_by_name_and_version_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            version_id=version_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.update_dataset_by_name_and_version.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only documented success status; everything else is raised.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_dataset_by_name_and_version.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}/versions/{versionId}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_dataset_by_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        **kwargs: Any
+    ) -> "_models.DatasetV2":
+        """Get a dataset for a given dataset id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_dataset_by_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            template_url=self.get_dataset_by_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only documented success status; everything else is raised.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_by_id.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def update_dataset(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        body: Optional["_models.DatasetV2"] = None,
+        **kwargs: Any
+    ) -> "_models.DatasetV2":
+        """Update a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        # Map common auth/not-found/conflict statuses onto azure-core exception
+        # types; callers may extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Serialize the optional request body; when body is None no JSON payload is sent.
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetV2')
+        else:
+            _json = None
+
+        # Build the HTTP request via the shared (sync) request builder, then
+        # adapt it for this client's legacy pipeline transport.
+        request = build_update_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.update_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the error body; does not raise on malformed payloads.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_dataset.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_dataset_by_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_name: str,
+        version_id: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.DatasetV2":
+        """Get a dataset for a given dataset name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_name:
+        :type dataset_name: str
+        :param version_id:
+        :type version_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        # Map common auth/not-found/conflict statuses onto azure-core exception
+        # types; callers may extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        # No request body for this GET; build the request from the shared builder.
+        request = build_get_dataset_by_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_name=dataset_name,
+            version_id=version_id,
+            template_url=self.get_dataset_by_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the error body; does not raise on malformed payloads.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_by_name.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_datasets_v1_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_datasets_v1_operations.py
new file mode 100644
index 00000000..97e74d53
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_datasets_v1_operations.py
@@ -0,0 +1,845 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._datasets_v1_operations import build_delete_all_datasets_request, build_get_all_dataset_definitions_request, build_get_all_dataset_versions_request, build_get_dataset_by_name_request, build_get_dataset_definition_request, build_list_request, build_register_request, build_unregister_dataset_request, build_update_dataset_request, build_update_definition_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DatasetsV1Operations:
+    """DatasetsV1Operations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Store the shared pipeline client and (de)serializers handed in by the
+        # generated service client; not intended for direct construction.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def get_dataset_definition(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        version: str,
+        **kwargs: Any
+    ) -> "_models.DatasetDefinition":
+        """Get a specific dataset definition.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param version:
+        :type version: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetDefinition, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetDefinition"]
+        # Map common auth/not-found/conflict statuses onto azure-core exception
+        # types; callers may extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        # No request body for this GET; build the request from the shared builder.
+        request = build_get_dataset_definition_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            version=version,
+            template_url=self.get_dataset_definition.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the error body; does not raise on malformed payloads.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetDefinition', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_definition.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions/{version}'}  # type: ignore
+
+
+    @distributed_trace
+    def get_all_dataset_definitions(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        page_size: Optional[int] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedDatasetDefinitionList"]:
+        """Get all dataset definitions for a given dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetDefinitionList"]
+        # Map common auth/not-found/conflict statuses onto azure-core exception
+        # types; callers may extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                # First page: expand the operation's own URL template.
+                request = build_get_all_dataset_definitions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.get_all_dataset_definitions.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                # Subsequent pages: reuse the service-supplied next_link as the URL.
+                request = build_get_all_dataset_definitions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            # Pull one page's items and its continuation link off the response.
+            deserialized = self._deserialize("PaginatedDatasetDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_all_dataset_definitions.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions'}  # type: ignore
+
+    @distributed_trace_async
+    async def update_definition(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        register_as_pending: Optional[bool] = False,
+        force_update: Optional[bool] = False,
+        dataset_type: Optional[str] = None,
+        user_version_id: Optional[str] = None,
+        body: Optional["_models.DatasetDefinition"] = None,
+        **kwargs: Any
+    ) -> "_models.Dataset":
+        """Update a dataset definition.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param register_as_pending:
+        :type register_as_pending: bool
+        :param force_update:
+        :type force_update: bool
+        :param dataset_type:
+        :type dataset_type: str
+        :param user_version_id:
+        :type user_version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        # Map common auth/not-found/conflict statuses onto azure-core exception
+        # types; callers may extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Serialize the optional request body; when body is None no JSON payload is sent.
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetDefinition')
+        else:
+            _json = None
+
+        request = build_update_definition_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            register_as_pending=register_as_pending,
+            force_update=force_update,
+            dataset_type=dataset_type,
+            user_version_id=user_version_id,
+            template_url=self.update_definition.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the error body; does not raise on malformed payloads.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Note: updating a definition returns the enclosing Dataset, not the definition itself.
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_definition.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions'}  # type: ignore
+
+
+    @distributed_trace
+    def get_all_dataset_versions(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        page_size: Optional[int] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedStringList"]:
+        """Get all dataset versions for a given dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedStringList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedStringList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedStringList"]
+        # Map common auth/not-found/conflict statuses onto azure-core exception
+        # types; callers may extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                # First page: expand the operation's own URL template.
+                request = build_get_all_dataset_versions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.get_all_dataset_versions.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                # Subsequent pages: reuse the service-supplied next_link as the URL.
+                request = build_get_all_dataset_versions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            # Pull one page's items and its continuation link off the response.
+            deserialized = self._deserialize("PaginatedStringList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_all_dataset_versions.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/versions'}  # type: ignore
+
+    @distributed_trace_async
+    async def get_dataset_by_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_name: str,
+        version_id: Optional[str] = None,
+        include_latest_definition: Optional[bool] = True,
+        **kwargs: Any
+    ) -> "_models.Dataset":
+        """Get a dataset for a given dataset name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_name:
+        :type dataset_name: str
+        :param version_id:
+        :type version_id: str
+        :param include_latest_definition:
+        :type include_latest_definition: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        # Map common auth/not-found/conflict statuses onto azure-core exception
+        # types; callers may extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        # No request body for this GET; build the request from the shared builder.
+        request = build_get_dataset_by_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_name=dataset_name,
+            version_id=version_id,
+            include_latest_definition=include_latest_definition,
+            template_url=self.get_dataset_by_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the error body; does not raise on malformed payloads.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_by_name.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}'}  # type: ignore
+
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_names: Optional[List[str]] = None,
+        search_text: Optional[str] = None,
+        include_invisible: Optional[bool] = False,
+        status: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        page_size: Optional[int] = None,
+        include_latest_definition: Optional[bool] = False,
+        order_by: Optional[str] = None,
+        order_by_asc: Optional[bool] = False,
+        dataset_types: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedDatasetList"]:
+        """Get a list of datasets.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_names:
+        :type dataset_names: list[str]
+        :param search_text:
+        :type search_text: str
+        :param include_invisible:
+        :type include_invisible: bool
+        :param status:
+        :type status: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :param include_latest_definition:
+        :type include_latest_definition: bool
+        :param order_by:
+        :type order_by: str
+        :param order_by_asc:
+        :type order_by_asc: bool
+        :param dataset_types:
+        :type dataset_types: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetList"]
+        # Map common auth/not-found/conflict statuses onto azure-core exception
+        # types; callers may extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                # First page: expand the operation's own URL template.
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_names=dataset_names,
+                    search_text=search_text,
+                    include_invisible=include_invisible,
+                    status=status,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    include_latest_definition=include_latest_definition,
+                    order_by=order_by,
+                    order_by_asc=order_by_asc,
+                    dataset_types=dataset_types,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                # Subsequent pages: reuse the service-supplied next_link as the URL.
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_names=dataset_names,
+                    search_text=search_text,
+                    include_invisible=include_invisible,
+                    status=status,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    include_latest_definition=include_latest_definition,
+                    order_by=order_by,
+                    order_by_asc=order_by_asc,
+                    dataset_types=dataset_types,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            # Pull one page's items and its continuation link off the response.
+            deserialized = self._deserialize("PaginatedDatasetList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+    @distributed_trace_async
+    async def register(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        register_as_pending: Optional[bool] = False,
+        if_exists_ok: Optional[bool] = True,
+        update_definition_if_exists: Optional[bool] = False,
+        with_data_hash: Optional[bool] = False,
+        user_version_id: Optional[str] = None,
+        body: Optional["_models.Dataset"] = None,
+        **kwargs: Any
+    ) -> "_models.Dataset":
+        """Register new dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param register_as_pending:
+        :type register_as_pending: bool
+        :param if_exists_ok:
+        :type if_exists_ok: bool
+        :param update_definition_if_exists:
+        :type update_definition_if_exists: bool
+        :param with_data_hash:
+        :type with_data_hash: bool
+        :param user_version_id:
+        :type user_version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Dataset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'Dataset')
+        else:
+            _json = None
+
+        request = build_register_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            register_as_pending=register_as_pending,
+            if_exists_ok=if_exists_ok,
+            update_definition_if_exists=update_definition_if_exists,
+            with_data_hash=with_data_hash,
+            user_version_id=user_version_id,
+            template_url=self.register.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    register.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_all_datasets(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> None:
+        """Unregister all datasets in the workspace.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_all_datasets_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.delete_all_datasets.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete_all_datasets.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def update_dataset(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        dataset_id: str,
+        force_update: Optional[bool] = False,
+        body: Optional["_models.Dataset"] = None,
+        **kwargs: Any
+    ) -> "_models.Dataset":
+        """Update a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param force_update:
+        :type force_update: bool
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Dataset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'Dataset')
+        else:
+            _json = None
+
+        request = build_update_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            force_update=force_update,
+            template_url=self.update_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_dataset.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def unregister_dataset(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        **kwargs: Any
+    ) -> None:
+        """Unregister a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_unregister_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            template_url=self.unregister_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    unregister_dataset.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_delete_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_delete_operations.py
new file mode 100644
index 00000000..9bb293a5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_delete_operations.py
@@ -0,0 +1,104 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._delete_operations import build_data_container_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DeleteOperations:
+    """DeleteOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def data_container(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        **kwargs: Any
+    ) -> "_models.HttpResponseMessage":
+        """data_container.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HttpResponseMessage, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.HttpResponseMessage
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HttpResponseMessage"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_data_container_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            template_url=self.data_container.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('HttpResponseMessage', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_get_operation_status_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_get_operation_status_operations.py
new file mode 100644
index 00000000..273b307f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/aio/operations/_get_operation_status_operations.py
@@ -0,0 +1,170 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._get_operation_status_operations import build_get_dataset_operation_status_request_initial
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class GetOperationStatusOperations:
+    """GetOperationStatusOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def _get_dataset_operation_status_initial(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        operation_id: str,
+        **kwargs: Any
+    ) -> Optional["_models.LongRunningOperationResponse1LongRunningOperationResponseObject"]:
+        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.LongRunningOperationResponse1LongRunningOperationResponseObject"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_dataset_operation_status_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            operation_id=operation_id,
+            template_url=self._get_dataset_operation_status_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = None
+        response_headers = {}
+        if response.status_code == 200:
+            deserialized = self._deserialize('LongRunningOperationResponse1LongRunningOperationResponseObject', pipeline_response)
+
+        if response.status_code == 202:
+            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+            
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+
+    _get_dataset_operation_status_initial.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/operations/{operationId}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def begin_get_dataset_operation_status(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        operation_id: str,
+        **kwargs: Any
+    ) -> AsyncLROPoller["_models.LongRunningOperationResponse1LongRunningOperationResponseObject"]:
+        """get_dataset_operation_status.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param operation_id:
+        :type operation_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either
+         LongRunningOperationResponse1LongRunningOperationResponseObject or the result of cls(response)
+        :rtype:
+         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LongRunningOperationResponse1LongRunningOperationResponseObject]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LongRunningOperationResponse1LongRunningOperationResponseObject"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._get_dataset_operation_status_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                operation_id=operation_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('LongRunningOperationResponse1LongRunningOperationResponseObject', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_get_dataset_operation_status.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/operations/{operationId}'}  # type: ignore
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/__init__.py
new file mode 100644
index 00000000..42488e7c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/__init__.py
@@ -0,0 +1,187 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+    from ._models_py3 import ActionResult
+    from ._models_py3 import AssetId
+    from ._models_py3 import BatchDataUriResponse
+    from ._models_py3 import BatchGetResolvedURIs
+    from ._models_py3 import ColumnDefinition
+    from ._models_py3 import CreateUnregisteredInputData
+    from ._models_py3 import CreateUnregisteredOutputData
+    from ._models_py3 import DataCallRequest
+    from ._models_py3 import DataContainer
+    from ._models_py3 import DataContainerEntity
+    from ._models_py3 import DataContainerMutable
+    from ._models_py3 import DataField
+    from ._models_py3 import DataUriV2Response
+    from ._models_py3 import DataVersion
+    from ._models_py3 import DataVersionEntity
+    from ._models_py3 import DataVersionMutable
+    from ._models_py3 import DataViewSetResult
+    from ._models_py3 import Dataset
+    from ._models_py3 import DatasetDefinition
+    from ._models_py3 import DatasetDefinitionReference
+    from ._models_py3 import DatasetPath
+    from ._models_py3 import DatasetState
+    from ._models_py3 import DatasetV2
+    from ._models_py3 import EntityMetadata
+    from ._models_py3 import ErrorAdditionalInfo
+    from ._models_py3 import ErrorResponse
+    from ._models_py3 import HistogramBin
+    from ._models_py3 import HttpContent
+    from ._models_py3 import HttpMethod
+    from ._models_py3 import HttpRequestMessage
+    from ._models_py3 import HttpResponseMessage
+    from ._models_py3 import InnerErrorResponse
+    from ._models_py3 import KeyValuePairStringIEnumerable1
+    from ._models_py3 import LongRunningOperationResponse1LongRunningOperationResponseObject
+    from ._models_py3 import Moments
+    from ._models_py3 import PaginatedDataContainerEntityList
+    from ._models_py3 import PaginatedDataVersionEntityList
+    from ._models_py3 import PaginatedDatasetDefinitionList
+    from ._models_py3 import PaginatedDatasetList
+    from ._models_py3 import PaginatedDatasetV2List
+    from ._models_py3 import PaginatedStringList
+    from ._models_py3 import ProfileActionResult
+    from ._models_py3 import ProfileResult
+    from ._models_py3 import Quantiles
+    from ._models_py3 import RegisterExistingData
+    from ._models_py3 import RootError
+    from ._models_py3 import STypeCount
+    from ._models_py3 import SqlDataPath
+    from ._models_py3 import StoredProcedureParameter
+    from ._models_py3 import StringLengthCount
+    from ._models_py3 import TypeCount
+    from ._models_py3 import User
+    from ._models_py3 import ValueCount
+except (SyntaxError, ImportError):
+    from ._models import ActionResult  # type: ignore
+    from ._models import AssetId  # type: ignore
+    from ._models import BatchDataUriResponse  # type: ignore
+    from ._models import BatchGetResolvedURIs  # type: ignore
+    from ._models import ColumnDefinition  # type: ignore
+    from ._models import CreateUnregisteredInputData  # type: ignore
+    from ._models import CreateUnregisteredOutputData  # type: ignore
+    from ._models import DataCallRequest  # type: ignore
+    from ._models import DataContainer  # type: ignore
+    from ._models import DataContainerEntity  # type: ignore
+    from ._models import DataContainerMutable  # type: ignore
+    from ._models import DataField  # type: ignore
+    from ._models import DataUriV2Response  # type: ignore
+    from ._models import DataVersion  # type: ignore
+    from ._models import DataVersionEntity  # type: ignore
+    from ._models import DataVersionMutable  # type: ignore
+    from ._models import DataViewSetResult  # type: ignore
+    from ._models import Dataset  # type: ignore
+    from ._models import DatasetDefinition  # type: ignore
+    from ._models import DatasetDefinitionReference  # type: ignore
+    from ._models import DatasetPath  # type: ignore
+    from ._models import DatasetState  # type: ignore
+    from ._models import DatasetV2  # type: ignore
+    from ._models import EntityMetadata  # type: ignore
+    from ._models import ErrorAdditionalInfo  # type: ignore
+    from ._models import ErrorResponse  # type: ignore
+    from ._models import HistogramBin  # type: ignore
+    from ._models import HttpContent  # type: ignore
+    from ._models import HttpMethod  # type: ignore
+    from ._models import HttpRequestMessage  # type: ignore
+    from ._models import HttpResponseMessage  # type: ignore
+    from ._models import InnerErrorResponse  # type: ignore
+    from ._models import KeyValuePairStringIEnumerable1  # type: ignore
+    from ._models import LongRunningOperationResponse1LongRunningOperationResponseObject  # type: ignore
+    from ._models import Moments  # type: ignore
+    from ._models import PaginatedDataContainerEntityList  # type: ignore
+    from ._models import PaginatedDataVersionEntityList  # type: ignore
+    from ._models import PaginatedDatasetDefinitionList  # type: ignore
+    from ._models import PaginatedDatasetList  # type: ignore
+    from ._models import PaginatedDatasetV2List  # type: ignore
+    from ._models import PaginatedStringList  # type: ignore
+    from ._models import ProfileActionResult  # type: ignore
+    from ._models import ProfileResult  # type: ignore
+    from ._models import Quantiles  # type: ignore
+    from ._models import RegisterExistingData  # type: ignore
+    from ._models import RootError  # type: ignore
+    from ._models import STypeCount  # type: ignore
+    from ._models import SqlDataPath  # type: ignore
+    from ._models import StoredProcedureParameter  # type: ignore
+    from ._models import StringLengthCount  # type: ignore
+    from ._models import TypeCount  # type: ignore
+    from ._models import User  # type: ignore
+    from ._models import ValueCount  # type: ignore
+
+from ._azure_machine_learning_workspaces_enums import (
+    DataflowType,
+    FieldType,
+    HttpStatusCode,
+    HttpVersionPolicy,
+    SType,
+    StoredProcedureParameterType,
+)
+
+__all__ = [
+    'ActionResult',
+    'AssetId',
+    'BatchDataUriResponse',
+    'BatchGetResolvedURIs',
+    'ColumnDefinition',
+    'CreateUnregisteredInputData',
+    'CreateUnregisteredOutputData',
+    'DataCallRequest',
+    'DataContainer',
+    'DataContainerEntity',
+    'DataContainerMutable',
+    'DataField',
+    'DataUriV2Response',
+    'DataVersion',
+    'DataVersionEntity',
+    'DataVersionMutable',
+    'DataViewSetResult',
+    'Dataset',
+    'DatasetDefinition',
+    'DatasetDefinitionReference',
+    'DatasetPath',
+    'DatasetState',
+    'DatasetV2',
+    'EntityMetadata',
+    'ErrorAdditionalInfo',
+    'ErrorResponse',
+    'HistogramBin',
+    'HttpContent',
+    'HttpMethod',
+    'HttpRequestMessage',
+    'HttpResponseMessage',
+    'InnerErrorResponse',
+    'KeyValuePairStringIEnumerable1',
+    'LongRunningOperationResponse1LongRunningOperationResponseObject',
+    'Moments',
+    'PaginatedDataContainerEntityList',
+    'PaginatedDataVersionEntityList',
+    'PaginatedDatasetDefinitionList',
+    'PaginatedDatasetList',
+    'PaginatedDatasetV2List',
+    'PaginatedStringList',
+    'ProfileActionResult',
+    'ProfileResult',
+    'Quantiles',
+    'RegisterExistingData',
+    'RootError',
+    'STypeCount',
+    'SqlDataPath',
+    'StoredProcedureParameter',
+    'StringLengthCount',
+    'TypeCount',
+    'User',
+    'ValueCount',
+    'DataflowType',
+    'FieldType',
+    'HttpStatusCode',
+    'HttpVersionPolicy',
+    'SType',
+    'StoredProcedureParameterType',
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_azure_machine_learning_workspaces_enums.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_azure_machine_learning_workspaces_enums.py
new file mode 100644
index 00000000..b06eab55
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_azure_machine_learning_workspaces_enums.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class DataflowType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    JSON = "Json"
+    YAML = "Yaml"
+
+class FieldType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    STRING = "String"
+    BOOLEAN = "Boolean"
+    INTEGER = "Integer"
+    DECIMAL = "Decimal"
+    DATE = "Date"
+    UNKNOWN = "Unknown"
+    ERROR = "Error"
+    NULL = "Null"
+    DATA_ROW = "DataRow"
+    LIST = "List"
+    STREAM = "Stream"
+
+class HttpStatusCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    CONTINUE_ENUM = "Continue"
+    SWITCHING_PROTOCOLS = "SwitchingProtocols"
+    PROCESSING = "Processing"
+    EARLY_HINTS = "EarlyHints"
+    OK = "OK"
+    CREATED = "Created"
+    ACCEPTED = "Accepted"
+    NON_AUTHORITATIVE_INFORMATION = "NonAuthoritativeInformation"
+    NO_CONTENT = "NoContent"
+    RESET_CONTENT = "ResetContent"
+    PARTIAL_CONTENT = "PartialContent"
+    MULTI_STATUS = "MultiStatus"
+    ALREADY_REPORTED = "AlreadyReported"
+    IM_USED = "IMUsed"
+    AMBIGUOUS = "Ambiguous"
+    MOVED = "Moved"
+    REDIRECT = "Redirect"
+    REDIRECT_METHOD = "RedirectMethod"
+    NOT_MODIFIED = "NotModified"
+    USE_PROXY = "UseProxy"
+    UNUSED = "Unused"
+    TEMPORARY_REDIRECT = "TemporaryRedirect"
+    PERMANENT_REDIRECT = "PermanentRedirect"
+    BAD_REQUEST = "BadRequest"
+    UNAUTHORIZED = "Unauthorized"
+    PAYMENT_REQUIRED = "PaymentRequired"
+    FORBIDDEN = "Forbidden"
+    NOT_FOUND = "NotFound"
+    METHOD_NOT_ALLOWED = "MethodNotAllowed"
+    NOT_ACCEPTABLE = "NotAcceptable"
+    PROXY_AUTHENTICATION_REQUIRED = "ProxyAuthenticationRequired"
+    REQUEST_TIMEOUT = "RequestTimeout"
+    CONFLICT = "Conflict"
+    GONE = "Gone"
+    LENGTH_REQUIRED = "LengthRequired"
+    PRECONDITION_FAILED = "PreconditionFailed"
+    REQUEST_ENTITY_TOO_LARGE = "RequestEntityTooLarge"
+    REQUEST_URI_TOO_LONG = "RequestUriTooLong"
+    UNSUPPORTED_MEDIA_TYPE = "UnsupportedMediaType"
+    REQUESTED_RANGE_NOT_SATISFIABLE = "RequestedRangeNotSatisfiable"
+    EXPECTATION_FAILED = "ExpectationFailed"
+    MISDIRECTED_REQUEST = "MisdirectedRequest"
+    UNPROCESSABLE_ENTITY = "UnprocessableEntity"
+    LOCKED = "Locked"
+    FAILED_DEPENDENCY = "FailedDependency"
+    UPGRADE_REQUIRED = "UpgradeRequired"
+    PRECONDITION_REQUIRED = "PreconditionRequired"
+    TOO_MANY_REQUESTS = "TooManyRequests"
+    REQUEST_HEADER_FIELDS_TOO_LARGE = "RequestHeaderFieldsTooLarge"
+    UNAVAILABLE_FOR_LEGAL_REASONS = "UnavailableForLegalReasons"
+    INTERNAL_SERVER_ERROR = "InternalServerError"
+    NOT_IMPLEMENTED = "NotImplemented"
+    BAD_GATEWAY = "BadGateway"
+    SERVICE_UNAVAILABLE = "ServiceUnavailable"
+    GATEWAY_TIMEOUT = "GatewayTimeout"
+    HTTP_VERSION_NOT_SUPPORTED = "HttpVersionNotSupported"
+    VARIANT_ALSO_NEGOTIATES = "VariantAlsoNegotiates"
+    INSUFFICIENT_STORAGE = "InsufficientStorage"
+    LOOP_DETECTED = "LoopDetected"
+    NOT_EXTENDED = "NotExtended"
+    NETWORK_AUTHENTICATION_REQUIRED = "NetworkAuthenticationRequired"
+
+class HttpVersionPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    REQUEST_VERSION_OR_LOWER = "RequestVersionOrLower"
+    REQUEST_VERSION_OR_HIGHER = "RequestVersionOrHigher"
+    REQUEST_VERSION_EXACT = "RequestVersionExact"
+
+class StoredProcedureParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    STRING = "String"
+    INT = "Int"
+    DECIMAL = "Decimal"
+    GUID = "Guid"
+    BOOLEAN = "Boolean"
+    DATE = "Date"
+
+class SType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    EMAIL_ADDRESS = "EmailAddress"
+    GEOGRAPHIC_COORDINATE = "GeographicCoordinate"
+    IPV4_ADDRESS = "Ipv4Address"
+    IPV6_ADDRESS = "Ipv6Address"
+    US_PHONE_NUMBER = "UsPhoneNumber"
+    ZIP_CODE = "ZipCode"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_models.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_models.py
new file mode 100644
index 00000000..9bc1f683
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_models.py
@@ -0,0 +1,2608 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class ActionResult(msrest.serialization.Model):
+    """ActionResult.
+
+    :ivar is_up_to_date:
+    :vartype is_up_to_date: bool
+    :ivar is_up_to_date_error:
+    :vartype is_up_to_date_error: str
+    :ivar result_artifact_ids:
+    :vartype result_artifact_ids: list[str]
+    :ivar in_progress_action_id:
+    :vartype in_progress_action_id: str
+    :ivar run_id:
+    :vartype run_id: str
+    :ivar experiment_name:
+    :vartype experiment_name: str
+    :ivar datastore_name:
+    :vartype datastore_name: str
+    """
+
+    _attribute_map = {
+        'is_up_to_date': {'key': 'isUpToDate', 'type': 'bool'},
+        'is_up_to_date_error': {'key': 'isUpToDateError', 'type': 'str'},
+        'result_artifact_ids': {'key': 'resultArtifactIds', 'type': '[str]'},
+        'in_progress_action_id': {'key': 'inProgressActionId', 'type': 'str'},
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'experiment_name': {'key': 'experimentName', 'type': 'str'},
+        'datastore_name': {'key': 'datastoreName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword is_up_to_date:
+        :paramtype is_up_to_date: bool
+        :keyword is_up_to_date_error:
+        :paramtype is_up_to_date_error: str
+        :keyword result_artifact_ids:
+        :paramtype result_artifact_ids: list[str]
+        :keyword in_progress_action_id:
+        :paramtype in_progress_action_id: str
+        :keyword run_id:
+        :paramtype run_id: str
+        :keyword experiment_name:
+        :paramtype experiment_name: str
+        :keyword datastore_name:
+        :paramtype datastore_name: str
+        """
+        super(ActionResult, self).__init__(**kwargs)
+        self.is_up_to_date = kwargs.get('is_up_to_date', None)
+        self.is_up_to_date_error = kwargs.get('is_up_to_date_error', None)
+        self.result_artifact_ids = kwargs.get('result_artifact_ids', None)
+        self.in_progress_action_id = kwargs.get('in_progress_action_id', None)
+        self.run_id = kwargs.get('run_id', None)
+        self.experiment_name = kwargs.get('experiment_name', None)
+        self.datastore_name = kwargs.get('datastore_name', None)
+
+
+class AssetId(msrest.serialization.Model):
+    """AssetId.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar value: Required.
+    :vartype value: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword value: Required.
+        :paramtype value: str
+        """
+        super(AssetId, self).__init__(**kwargs)
+        self.value = kwargs['value']
+
+
+class BatchDataUriResponse(msrest.serialization.Model):
+    """BatchDataUriResponse.
+
+    :ivar values: Dictionary of :code:`<DataUriV2Response>`.
+    :vartype values: dict[str, ~azure.mgmt.machinelearningservices.models.DataUriV2Response]
+    """
+
+    _attribute_map = {
+        'values': {'key': 'values', 'type': '{DataUriV2Response}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword values: Dictionary of :code:`<DataUriV2Response>`.
+        :paramtype values: dict[str, ~azure.mgmt.machinelearningservices.models.DataUriV2Response]
+        """
+        super(BatchDataUriResponse, self).__init__(**kwargs)
+        self.values = kwargs.get('values', None)
+
+
+class BatchGetResolvedURIs(msrest.serialization.Model):
+    """BatchGetResolvedURIs.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar values: Required.
+    :vartype values: list[str]
+    """
+
+    _validation = {
+        'values': {'required': True},
+    }
+
+    _attribute_map = {
+        'values': {'key': 'values', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword values: Required.
+        :paramtype values: list[str]
+        """
+        super(BatchGetResolvedURIs, self).__init__(**kwargs)
+        self.values = kwargs['values']
+
+
+class ColumnDefinition(msrest.serialization.Model):
+    """ColumnDefinition.
+
+    :ivar id:
+    :vartype id: str
+    :ivar type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
+     "Unknown", "Error", "Null", "DataRow", "List", "Stream".
+    :vartype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
+    """
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword id:
+        :paramtype id: str
+        :keyword type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
+         "Unknown", "Error", "Null", "DataRow", "List", "Stream".
+        :paramtype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
+        """
+        super(ColumnDefinition, self).__init__(**kwargs)
+        self.id = kwargs.get('id', None)
+        self.type = kwargs.get('type', None)
+
+
+class CreateUnregisteredInputData(msrest.serialization.Model):
+    """CreateUnregisteredInputData.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar run_id: Required.
+    :vartype run_id: str
+    :ivar input_name: Required.
+    :vartype input_name: str
+    :ivar uri: Required.
+    :vartype uri: str
+    :ivar type: Required.
+    :vartype type: str
+    """
+
+    _validation = {
+        'run_id': {'required': True},
+        'input_name': {'required': True},
+        'uri': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'input_name': {'key': 'inputName', 'type': 'str'},
+        'uri': {'key': 'uri', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_id: Required.
+        :paramtype run_id: str
+        :keyword input_name: Required.
+        :paramtype input_name: str
+        :keyword uri: Required.
+        :paramtype uri: str
+        :keyword type: Required.
+        :paramtype type: str
+        """
+        super(CreateUnregisteredInputData, self).__init__(**kwargs)
+        self.run_id = kwargs['run_id']
+        self.input_name = kwargs['input_name']
+        self.uri = kwargs['uri']
+        self.type = kwargs['type']
+
+
+class CreateUnregisteredOutputData(msrest.serialization.Model):
+    """CreateUnregisteredOutputData.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar run_id: Required.
+    :vartype run_id: str
+    :ivar output_name: Required.
+    :vartype output_name: str
+    :ivar uri: Required.
+    :vartype uri: str
+    :ivar type: Required.
+    :vartype type: str
+    """
+
+    _validation = {
+        'run_id': {'required': True},
+        'output_name': {'required': True},
+        'uri': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'output_name': {'key': 'outputName', 'type': 'str'},
+        'uri': {'key': 'uri', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_id: Required.
+        :paramtype run_id: str
+        :keyword output_name: Required.
+        :paramtype output_name: str
+        :keyword uri: Required.
+        :paramtype uri: str
+        :keyword type: Required.
+        :paramtype type: str
+        """
+        super(CreateUnregisteredOutputData, self).__init__(**kwargs)
+        self.run_id = kwargs['run_id']
+        self.output_name = kwargs['output_name']
+        self.uri = kwargs['uri']
+        self.type = kwargs['type']
+
+
+class DataCallRequest(msrest.serialization.Model):
+    """DataCallRequest.
+
+    :ivar data_uri:
+    :vartype data_uri: str
+    :ivar data_type:
+    :vartype data_type: str
+    :ivar asset_id:
+    :vartype asset_id: str
+    :ivar data_container_name:
+    :vartype data_container_name: str
+    :ivar version_id:
+    :vartype version_id: str
+    """
+
+    _attribute_map = {
+        'data_uri': {'key': 'dataUri', 'type': 'str'},
+        'data_type': {'key': 'dataType', 'type': 'str'},
+        'asset_id': {'key': 'assetId', 'type': 'str'},
+        'data_container_name': {'key': 'dataContainerName', 'type': 'str'},
+        'version_id': {'key': 'versionId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword data_uri:
+        :paramtype data_uri: str
+        :keyword data_type:
+        :paramtype data_type: str
+        :keyword asset_id:
+        :paramtype asset_id: str
+        :keyword data_container_name:
+        :paramtype data_container_name: str
+        :keyword version_id:
+        :paramtype version_id: str
+        """
+        super(DataCallRequest, self).__init__(**kwargs)
+        self.data_uri = kwargs.get('data_uri', None)
+        self.data_type = kwargs.get('data_type', None)
+        self.asset_id = kwargs.get('asset_id', None)
+        self.data_container_name = kwargs.get('data_container_name', None)
+        self.version_id = kwargs.get('version_id', None)
+
+
+class DataContainer(msrest.serialization.Model):
+    """DataContainer.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar name: Required.
+    :vartype name: str
+    :ivar data_type: Required.
+    :vartype data_type: str
+    :ivar mutable_props:
+    :vartype mutable_props: ~azure.mgmt.machinelearningservices.models.DataContainerMutable
+    :ivar is_registered:
+    :vartype is_registered: bool
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'data_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'data_type': {'key': 'dataType', 'type': 'str'},
+        'mutable_props': {'key': 'mutableProps', 'type': 'DataContainerMutable'},
+        'is_registered': {'key': 'isRegistered', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword name: Required.
+        :paramtype name: str
+        :keyword data_type: Required.
+        :paramtype data_type: str
+        :keyword mutable_props:
+        :paramtype mutable_props: ~azure.mgmt.machinelearningservices.models.DataContainerMutable
+        :keyword is_registered:
+        :paramtype is_registered: bool
+        """
+        super(DataContainer, self).__init__(**kwargs)
+        self.name = kwargs['name']
+        self.data_type = kwargs['data_type']
+        self.mutable_props = kwargs.get('mutable_props', None)
+        self.is_registered = kwargs.get('is_registered', None)
+
+
+class DataContainerEntity(msrest.serialization.Model):
+    """DataContainerEntity.
+
+    :ivar data_container:
+    :vartype data_container: ~azure.mgmt.machinelearningservices.models.DataContainer
+    :ivar entity_metadata:
+    :vartype entity_metadata: ~azure.mgmt.machinelearningservices.models.EntityMetadata
+    :ivar latest_version:
+    :vartype latest_version: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
+    :ivar next_version_id:
+    :vartype next_version_id: str
+    :ivar legacy_dataset_type:
+    :vartype legacy_dataset_type: str
+    """
+
+    _attribute_map = {
+        'data_container': {'key': 'dataContainer', 'type': 'DataContainer'},
+        'entity_metadata': {'key': 'entityMetadata', 'type': 'EntityMetadata'},
+        'latest_version': {'key': 'latestVersion', 'type': 'DataVersionEntity'},
+        'next_version_id': {'key': 'nextVersionId', 'type': 'str'},
+        'legacy_dataset_type': {'key': 'legacyDatasetType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword data_container:
+        :paramtype data_container: ~azure.mgmt.machinelearningservices.models.DataContainer
+        :keyword entity_metadata:
+        :paramtype entity_metadata: ~azure.mgmt.machinelearningservices.models.EntityMetadata
+        :keyword latest_version:
+        :paramtype latest_version: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
+        :keyword next_version_id:
+        :paramtype next_version_id: str
+        :keyword legacy_dataset_type:
+        :paramtype legacy_dataset_type: str
+        """
+        super(DataContainerEntity, self).__init__(**kwargs)
+        self.data_container = kwargs.get('data_container', None)
+        self.entity_metadata = kwargs.get('entity_metadata', None)
+        self.latest_version = kwargs.get('latest_version', None)
+        self.next_version_id = kwargs.get('next_version_id', None)
+        self.legacy_dataset_type = kwargs.get('legacy_dataset_type', None)
+
+
+class DataContainerMutable(msrest.serialization.Model):
+    """DataContainerMutable.
+
+    :ivar description:
+    :vartype description: str
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar is_archived:
+    :vartype is_archived: bool
+    """
+
+    _attribute_map = {
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'is_archived': {'key': 'isArchived', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword description:
+        :paramtype description: str
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword is_archived:
+        :paramtype is_archived: bool
+        """
+        super(DataContainerMutable, self).__init__(**kwargs)
+        self.description = kwargs.get('description', None)
+        self.tags = kwargs.get('tags', None)
+        self.is_archived = kwargs.get('is_archived', None)
+
+
+class DataField(msrest.serialization.Model):
+    """DataField.
+
+    :ivar type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
+     "Unknown", "Error", "Null", "DataRow", "List", "Stream".
+    :vartype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
+    :ivar value: Anything.
+    :vartype value: any
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
+         "Unknown", "Error", "Null", "DataRow", "List", "Stream".
+        :paramtype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
+        :keyword value: Anything.
+        :paramtype value: any
+        """
+        super(DataField, self).__init__(**kwargs)
+        self.type = kwargs.get('type', None)
+        self.value = kwargs.get('value', None)
+
+
+class Dataset(msrest.serialization.Model):
+    """Dataset.
+
+    :ivar dataset_id:
+    :vartype dataset_id: str
+    :ivar dataset_state:
+    :vartype dataset_state: ~azure.mgmt.machinelearningservices.models.DatasetState
+    :ivar latest:
+    :vartype latest: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+    :ivar next_version_id:
+    :vartype next_version_id: str
+    :ivar created_time:
+    :vartype created_time: ~datetime.datetime
+    :ivar modified_time:
+    :vartype modified_time: ~datetime.datetime
+    :ivar etag:
+    :vartype etag: str
+    :ivar name:
+    :vartype name: str
+    :ivar description:
+    :vartype description: str
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar is_visible:
+    :vartype is_visible: bool
+    :ivar default_compute:
+    :vartype default_compute: str
+    :ivar dataset_type:
+    :vartype dataset_type: str
+    """
+
+    _attribute_map = {
+        'dataset_id': {'key': 'datasetId', 'type': 'str'},
+        'dataset_state': {'key': 'datasetState', 'type': 'DatasetState'},
+        'latest': {'key': 'latest', 'type': 'DatasetDefinition'},
+        'next_version_id': {'key': 'nextVersionId', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'is_visible': {'key': 'isVisible', 'type': 'bool'},
+        'default_compute': {'key': 'defaultCompute', 'type': 'str'},
+        'dataset_type': {'key': 'datasetType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword dataset_id:
+        :paramtype dataset_id: str
+        :keyword dataset_state:
+        :paramtype dataset_state: ~azure.mgmt.machinelearningservices.models.DatasetState
+        :keyword latest:
+        :paramtype latest: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :keyword next_version_id:
+        :paramtype next_version_id: str
+        :keyword created_time:
+        :paramtype created_time: ~datetime.datetime
+        :keyword modified_time:
+        :paramtype modified_time: ~datetime.datetime
+        :keyword etag:
+        :paramtype etag: str
+        :keyword name:
+        :paramtype name: str
+        :keyword description:
+        :paramtype description: str
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword is_visible:
+        :paramtype is_visible: bool
+        :keyword default_compute:
+        :paramtype default_compute: str
+        :keyword dataset_type:
+        :paramtype dataset_type: str
+        """
+        super(Dataset, self).__init__(**kwargs)
+        self.dataset_id = kwargs.get('dataset_id', None)
+        self.dataset_state = kwargs.get('dataset_state', None)
+        self.latest = kwargs.get('latest', None)
+        self.next_version_id = kwargs.get('next_version_id', None)
+        self.created_time = kwargs.get('created_time', None)
+        self.modified_time = kwargs.get('modified_time', None)
+        self.etag = kwargs.get('etag', None)
+        self.name = kwargs.get('name', None)
+        self.description = kwargs.get('description', None)
+        self.tags = kwargs.get('tags', None)
+        self.is_visible = kwargs.get('is_visible', None)
+        self.default_compute = kwargs.get('default_compute', None)
+        self.dataset_type = kwargs.get('dataset_type', None)
+
+
+class DatasetDefinition(msrest.serialization.Model):
+    """DatasetDefinition.
+
+    :ivar dataset_id:
+    :vartype dataset_id: str
+    :ivar version_id:
+    :vartype version_id: str
+    :ivar dataset_definition_state:
+    :vartype dataset_definition_state: ~azure.mgmt.machinelearningservices.models.DatasetState
+    :ivar dataflow:
+    :vartype dataflow: str
+    :ivar dataflow_type: Possible values include: "Json", "Yaml".
+    :vartype dataflow_type: str or ~azure.mgmt.machinelearningservices.models.DataflowType
+    :ivar data_path:
+    :vartype data_path: ~azure.mgmt.machinelearningservices.models.DatasetPath
+    :ivar partition_format_in_path:
+    :vartype partition_format_in_path: str
+    :ivar profile_action_result:
+    :vartype profile_action_result: ~azure.mgmt.machinelearningservices.models.ProfileActionResult
+    :ivar notes:
+    :vartype notes: str
+    :ivar etag:
+    :vartype etag: str
+    :ivar created_time:
+    :vartype created_time: ~datetime.datetime
+    :ivar modified_time:
+    :vartype modified_time: ~datetime.datetime
+    :ivar data_expiry_time:
+    :vartype data_expiry_time: ~datetime.datetime
+    :ivar created_by:
+    :vartype created_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar modified_by:
+    :vartype modified_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar file_type:
+    :vartype file_type: str
+    :ivar properties: Dictionary of :code:`<any>`.
+    :vartype properties: dict[str, any]
+    :ivar saved_dataset_id:
+    :vartype saved_dataset_id: str
+    :ivar telemetry_info: Dictionary of :code:`<string>`.
+    :vartype telemetry_info: dict[str, str]
+    :ivar use_description_tags_from_definition:
+    :vartype use_description_tags_from_definition: bool
+    :ivar description:
+    :vartype description: str
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    """
+
+    _attribute_map = {
+        'dataset_id': {'key': 'datasetId', 'type': 'str'},
+        'version_id': {'key': 'versionId', 'type': 'str'},
+        'dataset_definition_state': {'key': 'datasetDefinitionState', 'type': 'DatasetState'},
+        'dataflow': {'key': 'dataflow', 'type': 'str'},
+        'dataflow_type': {'key': 'dataflowType', 'type': 'str'},
+        'data_path': {'key': 'dataPath', 'type': 'DatasetPath'},
+        'partition_format_in_path': {'key': 'partitionFormatInPath', 'type': 'str'},
+        'profile_action_result': {'key': 'profileActionResult', 'type': 'ProfileActionResult'},
+        'notes': {'key': 'notes', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+        'data_expiry_time': {'key': 'dataExpiryTime', 'type': 'iso-8601'},
+        'created_by': {'key': 'createdBy', 'type': 'User'},
+        'modified_by': {'key': 'modifiedBy', 'type': 'User'},
+        'file_type': {'key': 'fileType', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{object}'},
+        'saved_dataset_id': {'key': 'savedDatasetId', 'type': 'str'},
+        'telemetry_info': {'key': 'telemetryInfo', 'type': '{str}'},
+        'use_description_tags_from_definition': {'key': 'useDescriptionTagsFromDefinition', 'type': 'bool'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword dataset_id:
+        :paramtype dataset_id: str
+        :keyword version_id:
+        :paramtype version_id: str
+        :keyword dataset_definition_state:
+        :paramtype dataset_definition_state: ~azure.mgmt.machinelearningservices.models.DatasetState
+        :keyword dataflow:
+        :paramtype dataflow: str
+        :keyword dataflow_type: Possible values include: "Json", "Yaml".
+        :paramtype dataflow_type: str or ~azure.mgmt.machinelearningservices.models.DataflowType
+        :keyword data_path:
+        :paramtype data_path: ~azure.mgmt.machinelearningservices.models.DatasetPath
+        :keyword partition_format_in_path:
+        :paramtype partition_format_in_path: str
+        :keyword profile_action_result:
+        :paramtype profile_action_result:
+         ~azure.mgmt.machinelearningservices.models.ProfileActionResult
+        :keyword notes:
+        :paramtype notes: str
+        :keyword etag:
+        :paramtype etag: str
+        :keyword created_time:
+        :paramtype created_time: ~datetime.datetime
+        :keyword modified_time:
+        :paramtype modified_time: ~datetime.datetime
+        :keyword data_expiry_time:
+        :paramtype data_expiry_time: ~datetime.datetime
+        :keyword created_by:
+        :paramtype created_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword modified_by:
+        :paramtype modified_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword file_type:
+        :paramtype file_type: str
+        :keyword properties: Dictionary of :code:`<any>`.
+        :paramtype properties: dict[str, any]
+        :keyword saved_dataset_id:
+        :paramtype saved_dataset_id: str
+        :keyword telemetry_info: Dictionary of :code:`<string>`.
+        :paramtype telemetry_info: dict[str, str]
+        :keyword use_description_tags_from_definition:
+        :paramtype use_description_tags_from_definition: bool
+        :keyword description:
+        :paramtype description: str
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        """
+        super(DatasetDefinition, self).__init__(**kwargs)
+        self.dataset_id = kwargs.get('dataset_id', None)
+        self.version_id = kwargs.get('version_id', None)
+        self.dataset_definition_state = kwargs.get('dataset_definition_state', None)
+        self.dataflow = kwargs.get('dataflow', None)
+        self.dataflow_type = kwargs.get('dataflow_type', None)
+        self.data_path = kwargs.get('data_path', None)
+        self.partition_format_in_path = kwargs.get('partition_format_in_path', None)
+        self.profile_action_result = kwargs.get('profile_action_result', None)
+        self.notes = kwargs.get('notes', None)
+        self.etag = kwargs.get('etag', None)
+        self.created_time = kwargs.get('created_time', None)
+        self.modified_time = kwargs.get('modified_time', None)
+        self.data_expiry_time = kwargs.get('data_expiry_time', None)
+        self.created_by = kwargs.get('created_by', None)
+        self.modified_by = kwargs.get('modified_by', None)
+        self.file_type = kwargs.get('file_type', None)
+        self.properties = kwargs.get('properties', None)
+        self.saved_dataset_id = kwargs.get('saved_dataset_id', None)
+        self.telemetry_info = kwargs.get('telemetry_info', None)
+        self.use_description_tags_from_definition = kwargs.get('use_description_tags_from_definition', None)
+        self.description = kwargs.get('description', None)
+        self.tags = kwargs.get('tags', None)
+
+
class DatasetDefinitionReference(msrest.serialization.Model):
    """Pointer to a specific version of a dataset definition.

    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar definition_version:
    :vartype definition_version: str
    """

    _attribute_map = {
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'definition_version': {'key': 'definitionVersion', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(DatasetDefinitionReference, self).__init__(**kwargs)
        # All fields are optional; absent keywords default to None.
        for _field in ('dataset_id', 'definition_version'):
            setattr(self, _field, kwargs.get(_field))
+
+
class DatasetPath(msrest.serialization.Model):
    """Location of a dataset's underlying data (datastore path, URL, or SQL source).

    :ivar datastore_name:
    :vartype datastore_name: str
    :ivar relative_path:
    :vartype relative_path: str
    :ivar azure_file_path:
    :vartype azure_file_path: str
    :ivar paths:
    :vartype paths: list[str]
    :ivar sql_data_path:
    :vartype sql_data_path: ~azure.mgmt.machinelearningservices.models.SqlDataPath
    :ivar http_url:
    :vartype http_url: str
    :ivar additional_properties: Dictionary of :code:`<any>`.
    :vartype additional_properties: dict[str, any]
    :ivar partition_format:
    :vartype partition_format: str
    :ivar partition_format_ignore_error:
    :vartype partition_format_ignore_error: bool
    """

    _attribute_map = {
        'datastore_name': {'key': 'datastoreName', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'},
        'azure_file_path': {'key': 'azureFilePath', 'type': 'str'},
        'paths': {'key': 'paths', 'type': '[str]'},
        'sql_data_path': {'key': 'sqlDataPath', 'type': 'SqlDataPath'},
        'http_url': {'key': 'httpUrl', 'type': 'str'},
        'additional_properties': {'key': 'additionalProperties', 'type': '{object}'},
        'partition_format': {'key': 'partitionFormat', 'type': 'str'},
        'partition_format_ignore_error': {'key': 'partitionFormatIgnoreError', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(DatasetPath, self).__init__(**kwargs)
        # Every field is optional; missing keywords default to None.
        for _field in self._attribute_map:
            setattr(self, _field, kwargs.get(_field))
+
+
class DatasetState(msrest.serialization.Model):
    """Lifecycle state of a dataset, including an optional deprecation pointer.

    :ivar state:
    :vartype state: str
    :ivar deprecated_by:
    :vartype deprecated_by: ~azure.mgmt.machinelearningservices.models.DatasetDefinitionReference
    :ivar etag:
    :vartype etag: str
    """

    _attribute_map = {
        'state': {'key': 'state', 'type': 'str'},
        'deprecated_by': {'key': 'deprecatedBy', 'type': 'DatasetDefinitionReference'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(DatasetState, self).__init__(**kwargs)
        # All fields are optional; absent keywords default to None.
        for _field in ('state', 'deprecated_by', 'etag'):
            setattr(self, _field, kwargs.get(_field))
+
+
class DatasetV2(msrest.serialization.Model):
    """V2 representation of a dataset, including audit metadata and legacy payloads.

    :ivar dataset_id:
    :vartype dataset_id: str
    :ivar name:
    :vartype name: str
    :ivar version_id:
    :vartype version_id: str
    :ivar dataflow:
    :vartype dataflow: str
    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar modified_time:
    :vartype modified_time: ~datetime.datetime
    :ivar created_by:
    :vartype created_by: ~azure.mgmt.machinelearningservices.models.User
    :ivar modified_by:
    :vartype modified_by: ~azure.mgmt.machinelearningservices.models.User
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    :ivar telemetry_info: Dictionary of :code:`<string>`.
    :vartype telemetry_info: dict[str, str]
    :ivar description:
    :vartype description: str
    :ivar is_anonymous:
    :vartype is_anonymous: bool
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar legacy_properties: Dictionary of :code:`<any>`.
    :vartype legacy_properties: dict[str, any]
    :ivar data_expiry_time:
    :vartype data_expiry_time: ~datetime.datetime
    :ivar legacy: Dictionary of :code:`<any>`.
    :vartype legacy: dict[str, any]
    """

    _attribute_map = {
        'dataset_id': {'key': 'datasetId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version_id': {'key': 'versionId', 'type': 'str'},
        'dataflow': {'key': 'dataflow', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
        'created_by': {'key': 'createdBy', 'type': 'User'},
        'modified_by': {'key': 'modifiedBy', 'type': 'User'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'telemetry_info': {'key': 'telemetryInfo', 'type': '{str}'},
        'description': {'key': 'description', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'legacy_properties': {'key': 'legacyProperties', 'type': '{object}'},
        'data_expiry_time': {'key': 'dataExpiryTime', 'type': 'iso-8601'},
        'legacy': {'key': 'legacy', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(DatasetV2, self).__init__(**kwargs)
        # Every field is optional; iterate the attribute map so the two stay in sync.
        for _field in self._attribute_map:
            setattr(self, _field, kwargs.get(_field))
+
+
class DataUriV2Response(msrest.serialization.Model):
    """Response carrying a data URI and its type.

    :ivar uri:
    :vartype uri: str
    :ivar type:
    :vartype type: str
    """

    _attribute_map = {
        'uri': {'key': 'uri', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(DataUriV2Response, self).__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        for _field in ('uri', 'type'):
            setattr(self, _field, kwargs.get(_field))
+
+
class DataVersion(msrest.serialization.Model):
    """A single version of a data asset.

    All required parameters must be populated in order to send to Azure.

    :ivar asset_id:
    :vartype asset_id: str
    :ivar data_container_name: Required.
    :vartype data_container_name: str
    :ivar data_type: Required.
    :vartype data_type: str
    :ivar data_uri: Required.
    :vartype data_uri: str
    :ivar version_id: Required.
    :vartype version_id: str
    :ivar mutable_props:
    :vartype mutable_props: ~azure.mgmt.machinelearningservices.models.DataVersionMutable
    :ivar referenced_data_uris:
    :vartype referenced_data_uris: list[str]
    :ivar properties: Dictionary of :code:`<string>`.
    :vartype properties: dict[str, str]
    """

    _validation = {
        'data_container_name': {'required': True},
        'data_type': {'required': True},
        'data_uri': {'required': True},
        'version_id': {'required': True},
    }

    _attribute_map = {
        'asset_id': {'key': 'assetId', 'type': 'str'},
        'data_container_name': {'key': 'dataContainerName', 'type': 'str'},
        'data_type': {'key': 'dataType', 'type': 'str'},
        'data_uri': {'key': 'dataUri', 'type': 'str'},
        'version_id': {'key': 'versionId', 'type': 'str'},
        'mutable_props': {'key': 'mutableProps', 'type': 'DataVersionMutable'},
        'referenced_data_uris': {'key': 'referencedDataUris', 'type': '[str]'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class;
        the four fields marked Required must be supplied.
        """
        super(DataVersion, self).__init__(**kwargs)
        self.asset_id = kwargs.get('asset_id')
        # Required fields: a direct lookup raises KeyError when one is missing,
        # matching the generated client's behavior.
        self.data_container_name = kwargs['data_container_name']
        self.data_type = kwargs['data_type']
        self.data_uri = kwargs['data_uri']
        self.version_id = kwargs['version_id']
        self.mutable_props = kwargs.get('mutable_props')
        self.referenced_data_uris = kwargs.get('referenced_data_uris')
        self.properties = kwargs.get('properties')
+
+
class DataVersionEntity(msrest.serialization.Model):
    """A data version paired with its entity metadata.

    :ivar data_version:
    :vartype data_version: ~azure.mgmt.machinelearningservices.models.DataVersion
    :ivar entity_metadata:
    :vartype entity_metadata: ~azure.mgmt.machinelearningservices.models.EntityMetadata
    """

    _attribute_map = {
        'data_version': {'key': 'dataVersion', 'type': 'DataVersion'},
        'entity_metadata': {'key': 'entityMetadata', 'type': 'EntityMetadata'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(DataVersionEntity, self).__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        for _field in ('data_version', 'entity_metadata'):
            setattr(self, _field, kwargs.get(_field))
+
+
class DataVersionMutable(msrest.serialization.Model):
    """The mutable portion of a data version (expiry, description, tags, archive flag).

    :ivar data_expiry_time:
    :vartype data_expiry_time: ~datetime.datetime
    :ivar description:
    :vartype description: str
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar is_archived:
    :vartype is_archived: bool
    """

    _attribute_map = {
        'data_expiry_time': {'key': 'dataExpiryTime', 'type': 'iso-8601'},
        'description': {'key': 'description', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'is_archived': {'key': 'isArchived', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(DataVersionMutable, self).__init__(**kwargs)
        # Every field is optional; iterate the attribute map so the two stay in sync.
        for _field in self._attribute_map:
            setattr(self, _field, kwargs.get(_field))
+
+
class DataViewSetResult(msrest.serialization.Model):
    """Tabular preview of data: a column schema plus rows of data fields.

    :ivar schema:
    :vartype schema: list[~azure.mgmt.machinelearningservices.models.ColumnDefinition]
    :ivar rows:
    :vartype rows: list[list[~azure.mgmt.machinelearningservices.models.DataField]]
    """

    _attribute_map = {
        'schema': {'key': 'schema', 'type': '[ColumnDefinition]'},
        'rows': {'key': 'rows', 'type': '[[DataField]]'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(DataViewSetResult, self).__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        for _field in ('schema', 'rows'):
            setattr(self, _field, kwargs.get(_field))
+
+
class EntityMetadata(msrest.serialization.Model):
    """Audit metadata for an entity: etag, timestamps, and the users involved.

    :ivar etag:
    :vartype etag: str
    :ivar created_time:
    :vartype created_time: ~datetime.datetime
    :ivar modified_time:
    :vartype modified_time: ~datetime.datetime
    :ivar created_by:
    :vartype created_by: ~azure.mgmt.machinelearningservices.models.User
    :ivar modified_by:
    :vartype modified_by: ~azure.mgmt.machinelearningservices.models.User
    """

    _attribute_map = {
        'etag': {'key': 'etag', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
        'created_by': {'key': 'createdBy', 'type': 'User'},
        'modified_by': {'key': 'modifiedBy', 'type': 'User'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(EntityMetadata, self).__init__(**kwargs)
        # Every field is optional; iterate the attribute map so the two stay in sync.
        for _field in self._attribute_map:
            setattr(self, _field, kwargs.get(_field))
+
+
class ErrorAdditionalInfo(msrest.serialization.Model):
    """The resource management error additional info.

    :ivar type: The additional info type.
    :vartype type: str
    :ivar info: The additional info.
    :vartype info: any
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'info': {'key': 'info', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(ErrorAdditionalInfo, self).__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        for _field in ('type', 'info'):
            setattr(self, _field, kwargs.get(_field))
+
+
class ErrorResponse(msrest.serialization.Model):
    """The error response.

    :ivar error: The root error.
    :vartype error: ~azure.mgmt.machinelearningservices.models.RootError
    :ivar correlation: Dictionary containing correlation details for the error.
    :vartype correlation: dict[str, str]
    :ivar environment: The hosting environment.
    :vartype environment: str
    :ivar location: The Azure region.
    :vartype location: str
    :ivar time: The time in UTC.
    :vartype time: ~datetime.datetime
    :ivar component_name: Component name where error originated/encountered.
    :vartype component_name: str
    """

    _attribute_map = {
        'error': {'key': 'error', 'type': 'RootError'},
        'correlation': {'key': 'correlation', 'type': '{str}'},
        'environment': {'key': 'environment', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'time': {'key': 'time', 'type': 'iso-8601'},
        'component_name': {'key': 'componentName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(ErrorResponse, self).__init__(**kwargs)
        # Every field is optional; iterate the attribute map so the two stay in sync.
        for _field in self._attribute_map:
            setattr(self, _field, kwargs.get(_field))
+
+
class HistogramBin(msrest.serialization.Model):
    """One bin of a histogram: bounds plus the count of values falling inside.

    :ivar lower_bound:
    :vartype lower_bound: float
    :ivar upper_bound:
    :vartype upper_bound: float
    :ivar count:
    :vartype count: float
    """

    _attribute_map = {
        'lower_bound': {'key': 'lowerBound', 'type': 'float'},
        'upper_bound': {'key': 'upperBound', 'type': 'float'},
        'count': {'key': 'count', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(HistogramBin, self).__init__(**kwargs)
        # All fields are optional; absent keywords default to None.
        for _field in ('lower_bound', 'upper_bound', 'count'):
            setattr(self, _field, kwargs.get(_field))
+
+
class HttpContent(msrest.serialization.Model):
    """HTTP message body wrapper.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar headers:
    :vartype headers:
     list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringIEnumerable1]
    """

    _validation = {
        'headers': {'readonly': True},
    }

    _attribute_map = {
        'headers': {'key': 'headers', 'type': '[KeyValuePairStringIEnumerable1]'},
    }

    def __init__(self, **kwargs):
        """HttpContent exposes no writable keyword arguments."""
        super(HttpContent, self).__init__(**kwargs)
        # Read-only: populated by the server during deserialization, never sent.
        self.headers = None
+
+
class HttpMethod(msrest.serialization.Model):
    """Wrapper around an HTTP method name.

    :ivar method:
    :vartype method: str
    """

    _attribute_map = {
        'method': {'key': 'method', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(HttpMethod, self).__init__(**kwargs)
        # Optional; absent keyword defaults to None.
        self.method = kwargs.get('method')
+
+
class HttpRequestMessage(msrest.serialization.Model):
    """An HTTP request message.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar version:
    :vartype version: str
    :ivar version_policy: Possible values include: "RequestVersionOrLower",
     "RequestVersionOrHigher", "RequestVersionExact".
    :vartype version_policy: str or ~azure.mgmt.machinelearningservices.models.HttpVersionPolicy
    :ivar content:
    :vartype content: ~azure.mgmt.machinelearningservices.models.HttpContent
    :ivar method:
    :vartype method: ~azure.mgmt.machinelearningservices.models.HttpMethod
    :ivar request_uri:
    :vartype request_uri: str
    :ivar headers:
    :vartype headers:
     list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringIEnumerable1]
    :ivar options: Dictionary of :code:`<any>`.
    :vartype options: dict[str, any]
    """

    _validation = {
        'headers': {'readonly': True},
        'options': {'readonly': True},
    }

    _attribute_map = {
        'version': {'key': 'version', 'type': 'str'},
        'version_policy': {'key': 'versionPolicy', 'type': 'str'},
        'content': {'key': 'content', 'type': 'HttpContent'},
        'method': {'key': 'method', 'type': 'HttpMethod'},
        'request_uri': {'key': 'requestUri', 'type': 'str'},
        'headers': {'key': 'headers', 'type': '[KeyValuePairStringIEnumerable1]'},
        'options': {'key': 'options', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        """Accepts the writable instance variables listed on the class as keywords;
        ``headers`` and ``options`` are server-populated and cannot be set here.
        """
        super(HttpRequestMessage, self).__init__(**kwargs)
        # Writable fields are all optional; absent keywords default to None.
        for _field in ('version', 'version_policy', 'content', 'method', 'request_uri'):
            setattr(self, _field, kwargs.get(_field))
        # Read-only: populated by the server during deserialization, never sent.
        self.headers = None
        self.options = None
+
+
class HttpResponseMessage(msrest.serialization.Model):
    """An HTTP response message.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar version:
    :vartype version: str
    :ivar content:
    :vartype content: ~azure.mgmt.machinelearningservices.models.HttpContent
    :ivar status_code: HTTP status code name; possible values include: "Continue",
     "SwitchingProtocols", "Processing", "EarlyHints", "OK", "Created", "Accepted",
     "NonAuthoritativeInformation", "NoContent", "ResetContent", "PartialContent",
     "MultiStatus", "AlreadyReported", "IMUsed", "Ambiguous", "Moved", "Redirect",
     "RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect",
     "PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden",
     "NotFound", "MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired",
     "RequestTimeout", "Conflict", "Gone", "LengthRequired", "PreconditionFailed",
     "RequestEntityTooLarge", "RequestUriTooLong", "UnsupportedMediaType",
     "RequestedRangeNotSatisfiable", "ExpectationFailed", "MisdirectedRequest",
     "UnprocessableEntity", "Locked", "FailedDependency", "UpgradeRequired",
     "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge",
     "UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway",
     "ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported",
     "VariantAlsoNegotiates", "InsufficientStorage", "LoopDetected", "NotExtended",
     "NetworkAuthenticationRequired".
    :vartype status_code: str or ~azure.mgmt.machinelearningservices.models.HttpStatusCode
    :ivar reason_phrase:
    :vartype reason_phrase: str
    :ivar headers:
    :vartype headers:
     list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringIEnumerable1]
    :ivar trailing_headers:
    :vartype trailing_headers:
     list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringIEnumerable1]
    :ivar request_message:
    :vartype request_message: ~azure.mgmt.machinelearningservices.models.HttpRequestMessage
    :ivar is_success_status_code:
    :vartype is_success_status_code: bool
    """

    _validation = {
        'headers': {'readonly': True},
        'trailing_headers': {'readonly': True},
        'is_success_status_code': {'readonly': True},
    }

    _attribute_map = {
        'version': {'key': 'version', 'type': 'str'},
        'content': {'key': 'content', 'type': 'HttpContent'},
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'reason_phrase': {'key': 'reasonPhrase', 'type': 'str'},
        'headers': {'key': 'headers', 'type': '[KeyValuePairStringIEnumerable1]'},
        'trailing_headers': {'key': 'trailingHeaders', 'type': '[KeyValuePairStringIEnumerable1]'},
        'request_message': {'key': 'requestMessage', 'type': 'HttpRequestMessage'},
        'is_success_status_code': {'key': 'isSuccessStatusCode', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Accepts the writable instance variables listed on the class as keywords;
        ``headers``, ``trailing_headers`` and ``is_success_status_code`` are
        server-populated and cannot be set here.
        """
        super(HttpResponseMessage, self).__init__(**kwargs)
        # Writable fields are all optional; absent keywords default to None.
        for _field in ('version', 'content', 'status_code', 'reason_phrase', 'request_message'):
            setattr(self, _field, kwargs.get(_field))
        # Read-only: populated by the server during deserialization, never sent.
        self.headers = None
        self.trailing_headers = None
        self.is_success_status_code = None
+
+
class InnerErrorResponse(msrest.serialization.Model):
    """A nested structure of errors.

    :ivar code: The error code.
    :vartype code: str
    :ivar inner_error: A nested structure of errors.
    :vartype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(InnerErrorResponse, self).__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        for _field in ('code', 'inner_error'):
            setattr(self, _field, kwargs.get(_field))
+
+
class KeyValuePairStringIEnumerable1(msrest.serialization.Model):
    """A key paired with a list of string values (e.g. one HTTP header and its values).

    :ivar key:
    :vartype key: str
    :ivar value:
    :vartype value: list[str]
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'str'},
        'value': {'key': 'value', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        """Accepts the keyword arguments listed as instance variables on the class."""
        super(KeyValuePairStringIEnumerable1, self).__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        for _field in ('key', 'value'):
            setattr(self, _field, kwargs.get(_field))
+
+
class LongRunningOperationResponse1LongRunningOperationResponseObject(msrest.serialization.Model):
    """Response envelope for a long-running operation.

    :ivar completion_result: Anything.
    :vartype completion_result: any
    :ivar location:
    :vartype location: str
    :ivar operation_result:
    :vartype operation_result: str
    """

    _attribute_map = {
        'completion_result': {'key': 'completionResult', 'type': 'object'},
        'location': {'key': 'location', 'type': 'str'},
        'operation_result': {'key': 'operationResult', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword completion_result: Anything.
        :paramtype completion_result: any
        :keyword location:
        :paramtype location: str
        :keyword operation_result:
        :paramtype operation_result: str
        """
        super(LongRunningOperationResponse1LongRunningOperationResponseObject, self).__init__(**kwargs)
        # Every field is optional; absent keys fall back to None.
        for field in ('completion_result', 'location', 'operation_result'):
            setattr(self, field, kwargs.get(field))
+
+
class Moments(msrest.serialization.Model):
    """Statistical moments of a profiled column.

    :ivar mean:
    :vartype mean: float
    :ivar standard_deviation:
    :vartype standard_deviation: float
    :ivar variance:
    :vartype variance: float
    :ivar skewness:
    :vartype skewness: float
    :ivar kurtosis:
    :vartype kurtosis: float
    """

    _attribute_map = {
        'mean': {'key': 'mean', 'type': 'float'},
        'standard_deviation': {'key': 'standardDeviation', 'type': 'float'},
        'variance': {'key': 'variance', 'type': 'float'},
        'skewness': {'key': 'skewness', 'type': 'float'},
        'kurtosis': {'key': 'kurtosis', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        """
        :keyword mean:
        :paramtype mean: float
        :keyword standard_deviation:
        :paramtype standard_deviation: float
        :keyword variance:
        :paramtype variance: float
        :keyword skewness:
        :paramtype skewness: float
        :keyword kurtosis:
        :paramtype kurtosis: float
        """
        super(Moments, self).__init__(**kwargs)
        # Every moment is optional; absent keys fall back to None.
        for field in ('mean', 'standard_deviation', 'variance', 'skewness', 'kurtosis'):
            setattr(self, field, kwargs.get(field))
+
+
class PaginatedDataContainerEntityList(msrest.serialization.Model):
    """A paginated list of DataContainerEntitys.

    :ivar value: An array of objects of type DataContainerEntity.
    :vartype value: list[~azure.mgmt.machinelearningservices.models.DataContainerEntity]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[DataContainerEntity]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword value: An array of objects of type DataContainerEntity.
        :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataContainerEntity]
        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
         additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedDataContainerEntityList, self).__init__(**kwargs)
        # Standard paging envelope: items plus continuation metadata, all optional.
        for field in ('value', 'continuation_token', 'next_link'):
            setattr(self, field, kwargs.get(field))
+
+
class PaginatedDatasetDefinitionList(msrest.serialization.Model):
    """A paginated list of DatasetDefinitions.

    :ivar value: An array of objects of type DatasetDefinition.
    :vartype value: list[~azure.mgmt.machinelearningservices.models.DatasetDefinition]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[DatasetDefinition]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword value: An array of objects of type DatasetDefinition.
        :paramtype value: list[~azure.mgmt.machinelearningservices.models.DatasetDefinition]
        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
         additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedDatasetDefinitionList, self).__init__(**kwargs)
        # Standard paging envelope: items plus continuation metadata, all optional.
        for field in ('value', 'continuation_token', 'next_link'):
            setattr(self, field, kwargs.get(field))
+
+
class PaginatedDatasetList(msrest.serialization.Model):
    """A paginated list of Datasets.

    :ivar value: An array of objects of type Dataset.
    :vartype value: list[~azure.mgmt.machinelearningservices.models.Dataset]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Dataset]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword value: An array of objects of type Dataset.
        :paramtype value: list[~azure.mgmt.machinelearningservices.models.Dataset]
        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
         additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedDatasetList, self).__init__(**kwargs)
        # Standard paging envelope: items plus continuation metadata, all optional.
        for field in ('value', 'continuation_token', 'next_link'):
            setattr(self, field, kwargs.get(field))
+
+
class PaginatedDatasetV2List(msrest.serialization.Model):
    """A paginated list of DatasetV2s.

    :ivar value: An array of objects of type DatasetV2.
    :vartype value: list[~azure.mgmt.machinelearningservices.models.DatasetV2]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[DatasetV2]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword value: An array of objects of type DatasetV2.
        :paramtype value: list[~azure.mgmt.machinelearningservices.models.DatasetV2]
        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
         additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedDatasetV2List, self).__init__(**kwargs)
        # Standard paging envelope: items plus continuation metadata, all optional.
        for field in ('value', 'continuation_token', 'next_link'):
            setattr(self, field, kwargs.get(field))
+
+
class PaginatedDataVersionEntityList(msrest.serialization.Model):
    """A paginated list of DataVersionEntitys.

    :ivar value: An array of objects of type DataVersionEntity.
    :vartype value: list[~azure.mgmt.machinelearningservices.models.DataVersionEntity]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[DataVersionEntity]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword value: An array of objects of type DataVersionEntity.
        :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataVersionEntity]
        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
         additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedDataVersionEntityList, self).__init__(**kwargs)
        # Standard paging envelope: items plus continuation metadata, all optional.
        for field in ('value', 'continuation_token', 'next_link'):
            setattr(self, field, kwargs.get(field))
+
+
class PaginatedStringList(msrest.serialization.Model):
    """A paginated list of Strings.

    :ivar value: An array of objects of type String.
    :vartype value: list[str]
    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
     additional pages.
    :vartype continuation_token: str
    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
     there are no additional pages.
    :vartype next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[str]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword value: An array of objects of type String.
        :paramtype value: list[str]
        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
         additional pages.
        :paramtype continuation_token: str
        :keyword next_link: The link to the next page constructed using the continuationToken.  If
         null, there are no additional pages.
        :paramtype next_link: str
        """
        super(PaginatedStringList, self).__init__(**kwargs)
        # Standard paging envelope: items plus continuation metadata, all optional.
        for field in ('value', 'continuation_token', 'next_link'):
            setattr(self, field, kwargs.get(field))
+
+
class ProfileActionResult(msrest.serialization.Model):
    """Outcome of a single profile action.

    :ivar profile_action_id:
    :vartype profile_action_id: str
    :ivar status:
    :vartype status: str
    :ivar completed_on_utc:
    :vartype completed_on_utc: ~datetime.datetime
    :ivar action_result:
    :vartype action_result: ~azure.mgmt.machinelearningservices.models.ActionResult
    """

    _attribute_map = {
        'profile_action_id': {'key': 'profileActionId', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'completed_on_utc': {'key': 'completedOnUtc', 'type': 'iso-8601'},
        'action_result': {'key': 'actionResult', 'type': 'ActionResult'},
    }

    def __init__(self, **kwargs):
        """
        :keyword profile_action_id:
        :paramtype profile_action_id: str
        :keyword status:
        :paramtype status: str
        :keyword completed_on_utc:
        :paramtype completed_on_utc: ~datetime.datetime
        :keyword action_result:
        :paramtype action_result: ~azure.mgmt.machinelearningservices.models.ActionResult
        """
        super(ProfileActionResult, self).__init__(**kwargs)
        # Every field is optional; absent keys fall back to None.
        for field in ('profile_action_id', 'status', 'completed_on_utc', 'action_result'):
            setattr(self, field, kwargs.get(field))
+
+
class ProfileResult(msrest.serialization.Model):
    """Per-column profiling statistics (counts, quantiles, moments, histograms).

    All attributes are optional keyword arguments defaulting to None;
    ``_attribute_map`` gives each attribute's JSON wire name and serialization
    type.

    :ivar column_name:
    :vartype column_name: str
    :ivar type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
     "Unknown", "Error", "Null", "DataRow", "List", "Stream".
    :vartype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
    :ivar min:
    :vartype min: ~azure.mgmt.machinelearningservices.models.DataField
    :ivar max:
    :vartype max: ~azure.mgmt.machinelearningservices.models.DataField
    :ivar count:
    :vartype count: long
    :ivar missing_count:
    :vartype missing_count: long
    :ivar not_missing_count:
    :vartype not_missing_count: long
    :ivar percent_missing:
    :vartype percent_missing: float
    :ivar error_count:
    :vartype error_count: long
    :ivar empty_count:
    :vartype empty_count: long
    :ivar quantiles:
    :vartype quantiles: ~azure.mgmt.machinelearningservices.models.Quantiles
    :ivar whisker_top:
    :vartype whisker_top: float
    :ivar whisker_bottom:
    :vartype whisker_bottom: float
    :ivar moments:
    :vartype moments: ~azure.mgmt.machinelearningservices.models.Moments
    :ivar type_counts:
    :vartype type_counts: list[~azure.mgmt.machinelearningservices.models.TypeCount]
    :ivar value_counts:
    :vartype value_counts: list[~azure.mgmt.machinelearningservices.models.ValueCount]
    :ivar unique_values:
    :vartype unique_values: long
    :ivar histogram:
    :vartype histogram: list[~azure.mgmt.machinelearningservices.models.HistogramBin]
    :ivar s_type_counts:
    :vartype s_type_counts: list[~azure.mgmt.machinelearningservices.models.STypeCount]
    :ivar average_spaces_count:
    :vartype average_spaces_count: float
    :ivar string_lengths:
    :vartype string_lengths: list[~azure.mgmt.machinelearningservices.models.StringLengthCount]
    """

    _attribute_map = {
        'column_name': {'key': 'columnName', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'min': {'key': 'min', 'type': 'DataField'},
        'max': {'key': 'max', 'type': 'DataField'},
        'count': {'key': 'count', 'type': 'long'},
        'missing_count': {'key': 'missingCount', 'type': 'long'},
        'not_missing_count': {'key': 'notMissingCount', 'type': 'long'},
        'percent_missing': {'key': 'percentMissing', 'type': 'float'},
        'error_count': {'key': 'errorCount', 'type': 'long'},
        'empty_count': {'key': 'emptyCount', 'type': 'long'},
        'quantiles': {'key': 'quantiles', 'type': 'Quantiles'},
        'whisker_top': {'key': 'whiskerTop', 'type': 'float'},
        'whisker_bottom': {'key': 'whiskerBottom', 'type': 'float'},
        'moments': {'key': 'moments', 'type': 'Moments'},
        'type_counts': {'key': 'typeCounts', 'type': '[TypeCount]'},
        'value_counts': {'key': 'valueCounts', 'type': '[ValueCount]'},
        'unique_values': {'key': 'uniqueValues', 'type': 'long'},
        'histogram': {'key': 'histogram', 'type': '[HistogramBin]'},
        's_type_counts': {'key': 'sTypeCounts', 'type': '[STypeCount]'},
        'average_spaces_count': {'key': 'averageSpacesCount', 'type': 'float'},
        'string_lengths': {'key': 'stringLengths', 'type': '[StringLengthCount]'},
    }

    def __init__(self, **kwargs):
        """Accepts every profiled statistic as an optional keyword argument;
        see the class docstring for types."""
        super(ProfileResult, self).__init__(**kwargs)
        # All fields are optional and simply copied from kwargs (None when absent).
        for field in (
            'column_name', 'type', 'min', 'max', 'count',
            'missing_count', 'not_missing_count', 'percent_missing',
            'error_count', 'empty_count', 'quantiles',
            'whisker_top', 'whisker_bottom', 'moments',
            'type_counts', 'value_counts', 'unique_values',
            'histogram', 's_type_counts', 'average_spaces_count',
            'string_lengths',
        ):
            setattr(self, field, kwargs.get(field))
+
+
class Quantiles(msrest.serialization.Model):
    """Quantile values of a profiled column (0.1th through 99.9th percentile).

    :ivar p0_d1:
    :vartype p0_d1: float
    :ivar p1:
    :vartype p1: float
    :ivar p5:
    :vartype p5: float
    :ivar p25:
    :vartype p25: float
    :ivar p50:
    :vartype p50: float
    :ivar p75:
    :vartype p75: float
    :ivar p95:
    :vartype p95: float
    :ivar p99:
    :vartype p99: float
    :ivar p99_d9:
    :vartype p99_d9: float
    """

    _attribute_map = {
        'p0_d1': {'key': 'p0D1', 'type': 'float'},
        'p1': {'key': 'p1', 'type': 'float'},
        'p5': {'key': 'p5', 'type': 'float'},
        'p25': {'key': 'p25', 'type': 'float'},
        'p50': {'key': 'p50', 'type': 'float'},
        'p75': {'key': 'p75', 'type': 'float'},
        'p95': {'key': 'p95', 'type': 'float'},
        'p99': {'key': 'p99', 'type': 'float'},
        'p99_d9': {'key': 'p99D9', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        """Each percentile is an optional ``float`` keyword argument
        (``p0_d1`` … ``p99_d9``); absent values default to None."""
        super(Quantiles, self).__init__(**kwargs)
        # All percentiles are optional; absent keys fall back to None.
        for field in ('p0_d1', 'p1', 'p5', 'p25', 'p50', 'p75', 'p95', 'p99', 'p99_d9'):
            setattr(self, field, kwargs.get(field))
+
+
class RegisterExistingData(msrest.serialization.Model):
    """Request to register an already-uploaded, unregistered data asset.

    All required parameters must be populated in order to send to Azure.

    :ivar existing_unregistered_asset_id: Required.
    :vartype existing_unregistered_asset_id: str
    :ivar name: Required.
    :vartype name: str
    :ivar version:
    :vartype version: str
    """

    _validation = {
        'existing_unregistered_asset_id': {'required': True},
        'name': {'required': True},
    }

    _attribute_map = {
        'existing_unregistered_asset_id': {'key': 'existingUnregisteredAssetId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword existing_unregistered_asset_id: Required.
        :paramtype existing_unregistered_asset_id: str
        :keyword name: Required.
        :paramtype name: str
        :keyword version:
        :paramtype version: str
        """
        super(RegisterExistingData, self).__init__(**kwargs)
        # Required fields: plain indexing deliberately raises KeyError when omitted.
        self.existing_unregistered_asset_id = kwargs['existing_unregistered_asset_id']
        self.name = kwargs['name']
        # Optional field.
        self.version = kwargs.get('version')
+
+
class RootError(msrest.serialization.Model):
    """The root error.

    :ivar code: The service-defined error code. Supported error codes: ServiceError, UserError,
     ValidationError, AzureStorageError, TransientError, RequestThrottled.
    :vartype code: str
    :ivar severity: The Severity of error.
    :vartype severity: int
    :ivar message: A human-readable representation of the error.
    :vartype message: str
    :ivar message_format: An unformatted version of the message with no variable substitution.
    :vartype message_format: str
    :ivar message_parameters: Value substitutions corresponding to the contents of MessageFormat.
    :vartype message_parameters: dict[str, str]
    :ivar reference_code: This code can optionally be set by the system generating the error.
     It should be used to classify the problem and identify the module and code area where the
     failure occured.
    :vartype reference_code: str
    :ivar details_uri: A URI which points to more details about the context of the error.
    :vartype details_uri: str
    :ivar target: The target of the error (e.g., the name of the property in error).
    :vartype target: str
    :ivar details: The related errors that occurred during the request.
    :vartype details: list[~azure.mgmt.machinelearningservices.models.RootError]
    :ivar inner_error: A nested structure of errors.
    :vartype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
    :ivar additional_info: The error additional info.
    :vartype additional_info: list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo]
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'severity': {'key': 'severity', 'type': 'int'},
        'message': {'key': 'message', 'type': 'str'},
        'message_format': {'key': 'messageFormat', 'type': 'str'},
        'message_parameters': {'key': 'messageParameters', 'type': '{str}'},
        'reference_code': {'key': 'referenceCode', 'type': 'str'},
        'details_uri': {'key': 'detailsUri', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[RootError]'},
        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
        'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
    }

    def __init__(self, **kwargs):
        """Accepts every error attribute as an optional keyword argument;
        see the class docstring for meanings and types."""
        super(RootError, self).__init__(**kwargs)
        # All fields are optional and simply copied from kwargs (None when absent).
        for field in (
            'code', 'severity', 'message', 'message_format',
            'message_parameters', 'reference_code', 'details_uri',
            'target', 'details', 'inner_error', 'additional_info',
        ):
            setattr(self, field, kwargs.get(field))
+
+
class SqlDataPath(msrest.serialization.Model):
    """Location of data in a SQL source: a table, an ad-hoc query, or a stored procedure.

    :ivar sql_table_name:
    :vartype sql_table_name: str
    :ivar sql_query:
    :vartype sql_query: str
    :ivar sql_stored_procedure_name:
    :vartype sql_stored_procedure_name: str
    :ivar sql_stored_procedure_params:
    :vartype sql_stored_procedure_params:
     list[~azure.mgmt.machinelearningservices.models.StoredProcedureParameter]
    :ivar query_timeout:
    :vartype query_timeout: long
    """

    _attribute_map = {
        'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
        'sql_query': {'key': 'sqlQuery', 'type': 'str'},
        'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
        'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[StoredProcedureParameter]'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        """
        :keyword sql_table_name:
        :paramtype sql_table_name: str
        :keyword sql_query:
        :paramtype sql_query: str
        :keyword sql_stored_procedure_name:
        :paramtype sql_stored_procedure_name: str
        :keyword sql_stored_procedure_params:
        :paramtype sql_stored_procedure_params:
         list[~azure.mgmt.machinelearningservices.models.StoredProcedureParameter]
        :keyword query_timeout:
        :paramtype query_timeout: long
        """
        super(SqlDataPath, self).__init__(**kwargs)
        # Every field is optional; absent keys fall back to None.
        for field in (
            'sql_table_name', 'sql_query', 'sql_stored_procedure_name',
            'sql_stored_procedure_params', 'query_timeout',
        ):
            setattr(self, field, kwargs.get(field))
+
+
class StoredProcedureParameter(msrest.serialization.Model):
    """A single name/value/type parameter for a SQL stored procedure.

    :ivar name:
    :vartype name: str
    :ivar value:
    :vartype value: str
    :ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
    :vartype type: str or ~azure.mgmt.machinelearningservices.models.StoredProcedureParameterType
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """
        :keyword name:
        :paramtype name: str
        :keyword value:
        :paramtype value: str
        :keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
        :paramtype type: str or ~azure.mgmt.machinelearningservices.models.StoredProcedureParameterType
        """
        super(StoredProcedureParameter, self).__init__(**kwargs)
        # Every field is optional; absent keys fall back to None.
        for field in ('name', 'value', 'type'):
            setattr(self, field, kwargs.get(field))
+
+
class StringLengthCount(msrest.serialization.Model):
    """A (string length, occurrence count) histogram entry.

    :ivar long length:
    :ivar long count:
    """

    _attribute_map = {
        "length": {"key": "length", "type": "long"},
        "count": {"key": "count", "type": "long"},
    }

    def __init__(self, **kwargs):
        """Accepts ``length`` and ``count`` as optional keyword arguments."""
        super().__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        self.length = kwargs.get("length")
        self.count = kwargs.get("count")
+
+
class STypeCount(msrest.serialization.Model):
    """Occurrence count for a semantic type (SType).

    :ivar s_type: Possible values include: "EmailAddress", "GeographicCoordinate", "Ipv4Address",
     "Ipv6Address", "UsPhoneNumber", "ZipCode".
    :vartype s_type: str or ~azure.mgmt.machinelearningservices.models.SType
    :ivar long count:
    """

    _attribute_map = {
        "s_type": {"key": "sType", "type": "str"},
        "count": {"key": "count", "type": "long"},
    }

    def __init__(self, **kwargs):
        """Accepts ``s_type`` and ``count`` as optional keyword arguments."""
        super().__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        self.s_type = kwargs.get("s_type")
        self.count = kwargs.get("count")
+
+
class TypeCount(msrest.serialization.Model):
    """Occurrence count for a field type.

    :ivar type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
     "Unknown", "Error", "Null", "DataRow", "List", "Stream".
    :vartype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
    :ivar long count:
    """

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "count": {"key": "count", "type": "long"},
    }

    def __init__(self, **kwargs):
        """Accepts ``type`` and ``count`` as optional keyword arguments."""
        super().__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        self.type = kwargs.get("type")
        self.count = kwargs.get("count")
+
+
class User(msrest.serialization.Model):
    """Identity information for a user or service principal.

    Several fields are PII and must never be logged; see the per-field notes.

    :ivar user_object_id: Object ID of the user or service principal.
     This is EUPI and may only be logged to warm path telemetry.
    :vartype user_object_id: str
    :ivar user_pu_id: PuID of the user or service principal. PII; never log.
    :vartype user_pu_id: str
    :ivar user_idp: Identity provider of the user, e.g. live.com. PII; never log.
    :vartype user_idp: str
    :ivar user_alt_sec_id: Alternate sec id representing the user in a different identity
     provider system, e.g. 1:live.com:puid. PII; never log.
    :vartype user_alt_sec_id: str
    :ivar user_iss: Issuer which issued the token for this user. PII; never log.
    :vartype user_iss: str
    :ivar user_tenant_id: Tenant ID of the user or service principal.
    :vartype user_tenant_id: str
    :ivar user_name: Full name of the user, or app ID for a service principal. PII; never log.
    :vartype user_name: str
    :ivar upn: User principal name (UPN). PII; never log.
    :vartype upn: str
    """

    _attribute_map = {
        "user_object_id": {"key": "userObjectId", "type": "str"},
        "user_pu_id": {"key": "userPuId", "type": "str"},
        "user_idp": {"key": "userIdp", "type": "str"},
        "user_alt_sec_id": {"key": "userAltSecId", "type": "str"},
        "user_iss": {"key": "userIss", "type": "str"},
        "user_tenant_id": {"key": "userTenantId", "type": "str"},
        "user_name": {"key": "userName", "type": "str"},
        "upn": {"key": "upn", "type": "str"},
    }

    def __init__(self, **kwargs):
        """Accepts the fields documented on the class as optional keyword arguments."""
        super().__init__(**kwargs)
        # Every field is optional; absent keywords default to None.
        self.user_object_id = kwargs.get("user_object_id")
        self.user_pu_id = kwargs.get("user_pu_id")
        self.user_idp = kwargs.get("user_idp")
        self.user_alt_sec_id = kwargs.get("user_alt_sec_id")
        self.user_iss = kwargs.get("user_iss")
        self.user_tenant_id = kwargs.get("user_tenant_id")
        self.user_name = kwargs.get("user_name")
        self.upn = kwargs.get("upn")
+
+
class ValueCount(msrest.serialization.Model):
    """Occurrence count for a specific data field value.

    :ivar value:
    :vartype value: ~azure.mgmt.machinelearningservices.models.DataField
    :ivar long count:
    """

    _attribute_map = {
        "value": {"key": "value", "type": "DataField"},
        "count": {"key": "count", "type": "long"},
    }

    def __init__(self, **kwargs):
        """Accepts ``value`` and ``count`` as optional keyword arguments."""
        super().__init__(**kwargs)
        # Both fields are optional; absent keywords default to None.
        self.value = kwargs.get("value")
        self.count = kwargs.get("count")
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_models_py3.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_models_py3.py
new file mode 100644
index 00000000..159fd205
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/models/_models_py3.py
@@ -0,0 +1,2916 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Any, Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_machine_learning_workspaces_enums import *
+
+
class ActionResult(msrest.serialization.Model):
    """Result of a dataset action run.

    :ivar bool is_up_to_date:
    :ivar str is_up_to_date_error:
    :ivar list[str] result_artifact_ids:
    :ivar str in_progress_action_id:
    :ivar str run_id:
    :ivar str experiment_name:
    :ivar str datastore_name:
    """

    _attribute_map = {
        "is_up_to_date": {"key": "isUpToDate", "type": "bool"},
        "is_up_to_date_error": {"key": "isUpToDateError", "type": "str"},
        "result_artifact_ids": {"key": "resultArtifactIds", "type": "[str]"},
        "in_progress_action_id": {"key": "inProgressActionId", "type": "str"},
        "run_id": {"key": "runId", "type": "str"},
        "experiment_name": {"key": "experimentName", "type": "str"},
        "datastore_name": {"key": "datastoreName", "type": "str"},
    }

    def __init__(
        self, *,
        is_up_to_date: Optional[bool] = None,
        is_up_to_date_error: Optional[str] = None,
        result_artifact_ids: Optional[List[str]] = None,
        in_progress_action_id: Optional[str] = None,
        run_id: Optional[str] = None,
        experiment_name: Optional[str] = None,
        datastore_name: Optional[str] = None,
        **kwargs
    ):
        """All fields are optional keyword-only arguments; see the class docstring."""
        super().__init__(**kwargs)
        self.is_up_to_date = is_up_to_date
        self.is_up_to_date_error = is_up_to_date_error
        self.result_artifact_ids = result_artifact_ids
        self.in_progress_action_id = in_progress_action_id
        self.run_id = run_id
        self.experiment_name = experiment_name
        self.datastore_name = datastore_name
+
+
class AssetId(msrest.serialization.Model):
    """Wrapper around a single asset identifier string.

    All required parameters must be populated in order to send to Azure.

    :ivar str value: Required.
    """

    _validation = {
        "value": {"required": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "str"},
    }

    def __init__(self, *, value: str, **kwargs):
        """:keyword str value: Required asset identifier."""
        super().__init__(**kwargs)
        self.value = value
+
+
class BatchDataUriResponse(msrest.serialization.Model):
    """Response carrying a dictionary of resolved data URI entries.

    :ivar values: Dictionary of :code:`<DataUriV2Response>`.
    :vartype values: dict[str, ~azure.mgmt.machinelearningservices.models.DataUriV2Response]
    """

    _attribute_map = {
        "values": {"key": "values", "type": "{DataUriV2Response}"},
    }

    def __init__(self, *, values: Optional[Dict[str, "DataUriV2Response"]] = None, **kwargs):
        """:keyword values: Optional dictionary of :code:`<DataUriV2Response>`."""
        super().__init__(**kwargs)
        self.values = values
+
+
class BatchGetResolvedURIs(msrest.serialization.Model):
    """Request body listing values to resolve in one batch.

    All required parameters must be populated in order to send to Azure.

    :ivar values: Required.
    :vartype values: list[str]
    """

    _validation = {
        "values": {"required": True},
    }

    _attribute_map = {
        "values": {"key": "values", "type": "[str]"},
    }

    def __init__(self, *, values: List[str], **kwargs):
        """:keyword values: Required list of values to resolve."""
        super().__init__(**kwargs)
        self.values = values
+
+
class ColumnDefinition(msrest.serialization.Model):
    """A column identifier paired with its field type.

    :ivar str id:
    :ivar type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
     "Unknown", "Error", "Null", "DataRow", "List", "Stream".
    :vartype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "type": {"key": "type", "type": "str"},
    }

    def __init__(
        self, *,
        id: Optional[str] = None,
        type: Optional[Union[str, "FieldType"]] = None,
        **kwargs
    ):
        """Both ``id`` and ``type`` are optional keyword arguments."""
        super().__init__(**kwargs)
        self.id = id
        self.type = type
+
+
class CreateUnregisteredInputData(msrest.serialization.Model):
    """Payload registering an unregistered input data reference for a run.

    All required parameters must be populated in order to send to Azure.

    :ivar str run_id: Required.
    :ivar str input_name: Required.
    :ivar str uri: Required.
    :ivar str type: Required.
    """

    _validation = {
        "run_id": {"required": True},
        "input_name": {"required": True},
        "uri": {"required": True},
        "type": {"required": True},
    }

    _attribute_map = {
        "run_id": {"key": "runId", "type": "str"},
        "input_name": {"key": "inputName", "type": "str"},
        "uri": {"key": "uri", "type": "str"},
        "type": {"key": "type", "type": "str"},
    }

    def __init__(self, *, run_id: str, input_name: str, uri: str, type: str, **kwargs):
        """All four keywords (``run_id``, ``input_name``, ``uri``, ``type``) are required."""
        super().__init__(**kwargs)
        self.run_id = run_id
        self.input_name = input_name
        self.uri = uri
        self.type = type
+
+
class CreateUnregisteredOutputData(msrest.serialization.Model):
    """Payload registering an unregistered output data reference for a run.

    All required parameters must be populated in order to send to Azure.

    :ivar str run_id: Required.
    :ivar str output_name: Required.
    :ivar str uri: Required.
    :ivar str type: Required.
    """

    _validation = {
        "run_id": {"required": True},
        "output_name": {"required": True},
        "uri": {"required": True},
        "type": {"required": True},
    }

    _attribute_map = {
        "run_id": {"key": "runId", "type": "str"},
        "output_name": {"key": "outputName", "type": "str"},
        "uri": {"key": "uri", "type": "str"},
        "type": {"key": "type", "type": "str"},
    }

    def __init__(self, *, run_id: str, output_name: str, uri: str, type: str, **kwargs):
        """All four keywords (``run_id``, ``output_name``, ``uri``, ``type``) are required."""
        super().__init__(**kwargs)
        self.run_id = run_id
        self.output_name = output_name
        self.uri = uri
        self.type = type
+
+
class DataCallRequest(msrest.serialization.Model):
    """Request describing the data entity to operate on.

    :ivar str data_uri:
    :ivar str data_type:
    :ivar str asset_id:
    :ivar str data_container_name:
    :ivar str version_id:
    """

    _attribute_map = {
        "data_uri": {"key": "dataUri", "type": "str"},
        "data_type": {"key": "dataType", "type": "str"},
        "asset_id": {"key": "assetId", "type": "str"},
        "data_container_name": {"key": "dataContainerName", "type": "str"},
        "version_id": {"key": "versionId", "type": "str"},
    }

    def __init__(
        self, *,
        data_uri: Optional[str] = None,
        data_type: Optional[str] = None,
        asset_id: Optional[str] = None,
        data_container_name: Optional[str] = None,
        version_id: Optional[str] = None,
        **kwargs
    ):
        """All fields are optional keyword-only arguments; see the class docstring."""
        super().__init__(**kwargs)
        self.data_uri = data_uri
        self.data_type = data_type
        self.asset_id = asset_id
        self.data_container_name = data_container_name
        self.version_id = version_id
+
+
class DataContainer(msrest.serialization.Model):
    """A named data container with its type and mutable properties.

    All required parameters must be populated in order to send to Azure.

    :ivar str name: Required.
    :ivar str data_type: Required.
    :ivar mutable_props:
    :vartype mutable_props: ~azure.mgmt.machinelearningservices.models.DataContainerMutable
    :ivar bool is_registered:
    """

    _validation = {
        "name": {"required": True},
        "data_type": {"required": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "data_type": {"key": "dataType", "type": "str"},
        "mutable_props": {"key": "mutableProps", "type": "DataContainerMutable"},
        "is_registered": {"key": "isRegistered", "type": "bool"},
    }

    def __init__(
        self, *,
        name: str,
        data_type: str,
        mutable_props: Optional["DataContainerMutable"] = None,
        is_registered: Optional[bool] = None,
        **kwargs
    ):
        """``name`` and ``data_type`` are required; the remaining keywords are optional."""
        super().__init__(**kwargs)
        self.name = name
        self.data_type = data_type
        self.mutable_props = mutable_props
        self.is_registered = is_registered
+
+
class DataContainerEntity(msrest.serialization.Model):
    """A data container together with its entity metadata and latest version.

    :ivar data_container:
    :vartype data_container: ~azure.mgmt.machinelearningservices.models.DataContainer
    :ivar entity_metadata:
    :vartype entity_metadata: ~azure.mgmt.machinelearningservices.models.EntityMetadata
    :ivar latest_version:
    :vartype latest_version: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
    :ivar str next_version_id:
    :ivar str legacy_dataset_type:
    """

    _attribute_map = {
        "data_container": {"key": "dataContainer", "type": "DataContainer"},
        "entity_metadata": {"key": "entityMetadata", "type": "EntityMetadata"},
        "latest_version": {"key": "latestVersion", "type": "DataVersionEntity"},
        "next_version_id": {"key": "nextVersionId", "type": "str"},
        "legacy_dataset_type": {"key": "legacyDatasetType", "type": "str"},
    }

    def __init__(
        self, *,
        data_container: Optional["DataContainer"] = None,
        entity_metadata: Optional["EntityMetadata"] = None,
        latest_version: Optional["DataVersionEntity"] = None,
        next_version_id: Optional[str] = None,
        legacy_dataset_type: Optional[str] = None,
        **kwargs
    ):
        """All fields are optional keyword-only arguments; see the class docstring."""
        super().__init__(**kwargs)
        self.data_container = data_container
        self.entity_metadata = entity_metadata
        self.latest_version = latest_version
        self.next_version_id = next_version_id
        self.legacy_dataset_type = legacy_dataset_type
+
+
class DataContainerMutable(msrest.serialization.Model):
    """Mutable properties of a data container.

    :ivar str description:
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar bool is_archived:
    """

    _attribute_map = {
        "description": {"key": "description", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "is_archived": {"key": "isArchived", "type": "bool"},
    }

    def __init__(
        self, *,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        is_archived: Optional[bool] = None,
        **kwargs
    ):
        """All fields are optional keyword-only arguments; see the class docstring."""
        super().__init__(**kwargs)
        self.description = description
        self.tags = tags
        self.is_archived = is_archived
+
+
class DataField(msrest.serialization.Model):
    """A value tagged with its field type.

    :ivar type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
     "Unknown", "Error", "Null", "DataRow", "List", "Stream".
    :vartype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
    :ivar value: Anything.
    :vartype value: any
    """

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "value": {"key": "value", "type": "object"},
    }

    def __init__(
        self, *,
        type: Optional[Union[str, "FieldType"]] = None,
        value: Optional[Any] = None,
        **kwargs
    ):
        """Both ``type`` and ``value`` are optional keyword arguments."""
        super().__init__(**kwargs)
        self.type = type
        self.value = value
+
+
class Dataset(msrest.serialization.Model):
    """A dataset record and its top-level metadata.

    :ivar str dataset_id:
    :ivar dataset_state:
    :vartype dataset_state: ~azure.mgmt.machinelearningservices.models.DatasetState
    :ivar latest:
    :vartype latest: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
    :ivar str next_version_id:
    :ivar ~datetime.datetime created_time:
    :ivar ~datetime.datetime modified_time:
    :ivar str etag:
    :ivar str name:
    :ivar str description:
    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
    :vartype tags: dict[str, str]
    :ivar bool is_visible:
    :ivar str default_compute:
    :ivar str dataset_type:
    """

    _attribute_map = {
        "dataset_id": {"key": "datasetId", "type": "str"},
        "dataset_state": {"key": "datasetState", "type": "DatasetState"},
        "latest": {"key": "latest", "type": "DatasetDefinition"},
        "next_version_id": {"key": "nextVersionId", "type": "str"},
        "created_time": {"key": "createdTime", "type": "iso-8601"},
        "modified_time": {"key": "modifiedTime", "type": "iso-8601"},
        "etag": {"key": "etag", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "is_visible": {"key": "isVisible", "type": "bool"},
        "default_compute": {"key": "defaultCompute", "type": "str"},
        "dataset_type": {"key": "datasetType", "type": "str"},
    }

    def __init__(
        self, *,
        dataset_id: Optional[str] = None,
        dataset_state: Optional["DatasetState"] = None,
        latest: Optional["DatasetDefinition"] = None,
        next_version_id: Optional[str] = None,
        created_time: Optional[datetime.datetime] = None,
        modified_time: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        is_visible: Optional[bool] = None,
        default_compute: Optional[str] = None,
        dataset_type: Optional[str] = None,
        **kwargs
    ):
        """All fields are optional keyword-only arguments; see the class docstring."""
        super().__init__(**kwargs)
        self.dataset_id = dataset_id
        self.dataset_state = dataset_state
        self.latest = latest
        self.next_version_id = next_version_id
        self.created_time = created_time
        self.modified_time = modified_time
        self.etag = etag
        self.name = name
        self.description = description
        self.tags = tags
        self.is_visible = is_visible
        self.default_compute = default_compute
        self.dataset_type = dataset_type
+
+
+class DatasetDefinition(msrest.serialization.Model):
+    """DatasetDefinition.
+
+    :ivar dataset_id:
+    :vartype dataset_id: str
+    :ivar version_id:
+    :vartype version_id: str
+    :ivar dataset_definition_state:
+    :vartype dataset_definition_state: ~azure.mgmt.machinelearningservices.models.DatasetState
+    :ivar dataflow:
+    :vartype dataflow: str
+    :ivar dataflow_type: Possible values include: "Json", "Yaml".
+    :vartype dataflow_type: str or ~azure.mgmt.machinelearningservices.models.DataflowType
+    :ivar data_path:
+    :vartype data_path: ~azure.mgmt.machinelearningservices.models.DatasetPath
+    :ivar partition_format_in_path:
+    :vartype partition_format_in_path: str
+    :ivar profile_action_result:
+    :vartype profile_action_result: ~azure.mgmt.machinelearningservices.models.ProfileActionResult
+    :ivar notes:
+    :vartype notes: str
+    :ivar etag:
+    :vartype etag: str
+    :ivar created_time:
+    :vartype created_time: ~datetime.datetime
+    :ivar modified_time:
+    :vartype modified_time: ~datetime.datetime
+    :ivar data_expiry_time:
+    :vartype data_expiry_time: ~datetime.datetime
+    :ivar created_by:
+    :vartype created_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar modified_by:
+    :vartype modified_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar file_type:
+    :vartype file_type: str
+    :ivar properties: Dictionary of :code:`<any>`.
+    :vartype properties: dict[str, any]
+    :ivar saved_dataset_id:
+    :vartype saved_dataset_id: str
+    :ivar telemetry_info: Dictionary of :code:`<string>`.
+    :vartype telemetry_info: dict[str, str]
+    :ivar use_description_tags_from_definition:
+    :vartype use_description_tags_from_definition: bool
+    :ivar description:
+    :vartype description: str
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    """
+
+    _attribute_map = {
+        'dataset_id': {'key': 'datasetId', 'type': 'str'},
+        'version_id': {'key': 'versionId', 'type': 'str'},
+        'dataset_definition_state': {'key': 'datasetDefinitionState', 'type': 'DatasetState'},
+        'dataflow': {'key': 'dataflow', 'type': 'str'},
+        'dataflow_type': {'key': 'dataflowType', 'type': 'str'},
+        'data_path': {'key': 'dataPath', 'type': 'DatasetPath'},
+        'partition_format_in_path': {'key': 'partitionFormatInPath', 'type': 'str'},
+        'profile_action_result': {'key': 'profileActionResult', 'type': 'ProfileActionResult'},
+        'notes': {'key': 'notes', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+        'data_expiry_time': {'key': 'dataExpiryTime', 'type': 'iso-8601'},
+        'created_by': {'key': 'createdBy', 'type': 'User'},
+        'modified_by': {'key': 'modifiedBy', 'type': 'User'},
+        'file_type': {'key': 'fileType', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{object}'},
+        'saved_dataset_id': {'key': 'savedDatasetId', 'type': 'str'},
+        'telemetry_info': {'key': 'telemetryInfo', 'type': '{str}'},
+        'use_description_tags_from_definition': {'key': 'useDescriptionTagsFromDefinition', 'type': 'bool'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        dataset_id: Optional[str] = None,
+        version_id: Optional[str] = None,
+        dataset_definition_state: Optional["DatasetState"] = None,
+        dataflow: Optional[str] = None,
+        dataflow_type: Optional[Union[str, "DataflowType"]] = None,
+        data_path: Optional["DatasetPath"] = None,
+        partition_format_in_path: Optional[str] = None,
+        profile_action_result: Optional["ProfileActionResult"] = None,
+        notes: Optional[str] = None,
+        etag: Optional[str] = None,
+        created_time: Optional[datetime.datetime] = None,
+        modified_time: Optional[datetime.datetime] = None,
+        data_expiry_time: Optional[datetime.datetime] = None,
+        created_by: Optional["User"] = None,
+        modified_by: Optional["User"] = None,
+        file_type: Optional[str] = None,
+        properties: Optional[Dict[str, Any]] = None,
+        saved_dataset_id: Optional[str] = None,
+        telemetry_info: Optional[Dict[str, str]] = None,
+        use_description_tags_from_definition: Optional[bool] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword dataset_id:
+        :paramtype dataset_id: str
+        :keyword version_id:
+        :paramtype version_id: str
+        :keyword dataset_definition_state:
+        :paramtype dataset_definition_state: ~azure.mgmt.machinelearningservices.models.DatasetState
+        :keyword dataflow:
+        :paramtype dataflow: str
+        :keyword dataflow_type: Possible values include: "Json", "Yaml".
+        :paramtype dataflow_type: str or ~azure.mgmt.machinelearningservices.models.DataflowType
+        :keyword data_path:
+        :paramtype data_path: ~azure.mgmt.machinelearningservices.models.DatasetPath
+        :keyword partition_format_in_path:
+        :paramtype partition_format_in_path: str
+        :keyword profile_action_result:
+        :paramtype profile_action_result:
+         ~azure.mgmt.machinelearningservices.models.ProfileActionResult
+        :keyword notes:
+        :paramtype notes: str
+        :keyword etag:
+        :paramtype etag: str
+        :keyword created_time:
+        :paramtype created_time: ~datetime.datetime
+        :keyword modified_time:
+        :paramtype modified_time: ~datetime.datetime
+        :keyword data_expiry_time:
+        :paramtype data_expiry_time: ~datetime.datetime
+        :keyword created_by:
+        :paramtype created_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword modified_by:
+        :paramtype modified_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword file_type:
+        :paramtype file_type: str
+        :keyword properties: Dictionary of :code:`<any>`.
+        :paramtype properties: dict[str, any]
+        :keyword saved_dataset_id:
+        :paramtype saved_dataset_id: str
+        :keyword telemetry_info: Dictionary of :code:`<string>`.
+        :paramtype telemetry_info: dict[str, str]
+        :keyword use_description_tags_from_definition:
+        :paramtype use_description_tags_from_definition: bool
+        :keyword description:
+        :paramtype description: str
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        """
+        super(DatasetDefinition, self).__init__(**kwargs)
+        self.dataset_id = dataset_id
+        self.version_id = version_id
+        self.dataset_definition_state = dataset_definition_state
+        self.dataflow = dataflow
+        self.dataflow_type = dataflow_type
+        self.data_path = data_path
+        self.partition_format_in_path = partition_format_in_path
+        self.profile_action_result = profile_action_result
+        self.notes = notes
+        self.etag = etag
+        self.created_time = created_time
+        self.modified_time = modified_time
+        self.data_expiry_time = data_expiry_time
+        self.created_by = created_by
+        self.modified_by = modified_by
+        self.file_type = file_type
+        self.properties = properties
+        self.saved_dataset_id = saved_dataset_id
+        self.telemetry_info = telemetry_info
+        self.use_description_tags_from_definition = use_description_tags_from_definition
+        self.description = description
+        self.tags = tags
+
+
class DatasetDefinitionReference(msrest.serialization.Model):
    """Reference to a dataset definition: a ``dataset_id`` plus a
    ``definition_version`` (both optional strings).
    """

    _attribute_map = {
        "dataset_id": {"key": "datasetId", "type": "str"},
        "definition_version": {"key": "definitionVersion", "type": "str"},
    }

    def __init__(self, *, dataset_id: Optional[str] = None,
                 definition_version: Optional[str] = None, **kwargs):
        """Store both optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.dataset_id = dataset_id
        self.definition_version = definition_version
+
+
class DatasetPath(msrest.serialization.Model):
    """Serialization model describing where a dataset's data lives.

    All attributes are optional: ``datastore_name``, ``relative_path``,
    ``azure_file_path``, ``http_url`` and ``partition_format`` are strings;
    ``paths`` is a list of strings; ``sql_data_path`` is a ``SqlDataPath``;
    ``additional_properties`` is a free-form ``dict[str, any]``;
    ``partition_format_ignore_error`` is a bool.
    """

    _attribute_map = {
        "datastore_name": {"key": "datastoreName", "type": "str"},
        "relative_path": {"key": "relativePath", "type": "str"},
        "azure_file_path": {"key": "azureFilePath", "type": "str"},
        "paths": {"key": "paths", "type": "[str]"},
        "sql_data_path": {"key": "sqlDataPath", "type": "SqlDataPath"},
        "http_url": {"key": "httpUrl", "type": "str"},
        "additional_properties": {"key": "additionalProperties", "type": "{object}"},
        "partition_format": {"key": "partitionFormat", "type": "str"},
        "partition_format_ignore_error": {"key": "partitionFormatIgnoreError", "type": "bool"},
    }

    def __init__(
        self, *,
        datastore_name: Optional[str] = None,
        relative_path: Optional[str] = None,
        azure_file_path: Optional[str] = None,
        paths: Optional[List[str]] = None,
        sql_data_path: Optional["SqlDataPath"] = None,
        http_url: Optional[str] = None,
        additional_properties: Optional[Dict[str, Any]] = None,
        partition_format: Optional[str] = None,
        partition_format_ignore_error: Optional[bool] = None,
        **kwargs
    ):
        """Store every keyword verbatim on the attribute of the same name."""
        super().__init__(**kwargs)
        self.datastore_name = datastore_name
        self.relative_path = relative_path
        self.azure_file_path = azure_file_path
        self.paths = paths
        self.sql_data_path = sql_data_path
        self.http_url = http_url
        self.additional_properties = additional_properties
        self.partition_format = partition_format
        self.partition_format_ignore_error = partition_format_ignore_error
+
+
class DatasetState(msrest.serialization.Model):
    """Serialization model for a dataset's state.

    Optional attributes: ``state`` (str), ``deprecated_by``
    (``DatasetDefinitionReference``) and ``etag`` (str).
    """

    _attribute_map = {
        "state": {"key": "state", "type": "str"},
        "deprecated_by": {"key": "deprecatedBy", "type": "DatasetDefinitionReference"},
        "etag": {"key": "etag", "type": "str"},
    }

    def __init__(self, *, state: Optional[str] = None,
                 deprecated_by: Optional["DatasetDefinitionReference"] = None,
                 etag: Optional[str] = None, **kwargs):
        """Store the optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.state = state
        self.deprecated_by = deprecated_by
        self.etag = etag
+
+
class DatasetV2(msrest.serialization.Model):
    """Serialization model for a V2 dataset record.

    All attributes are optional. Strings: ``dataset_id``, ``name``,
    ``version_id``, ``dataflow``, ``description``. Timestamps
    (``~datetime.datetime``): ``created_time``, ``modified_time``,
    ``data_expiry_time``. ``User`` models: ``created_by``, ``modified_by``.
    ``dict[str, str]``: ``properties``, ``telemetry_info``, ``tags``.
    ``dict[str, any]``: ``legacy_properties``, ``legacy``. Bool:
    ``is_anonymous``.
    """

    _attribute_map = {
        "dataset_id": {"key": "datasetId", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "version_id": {"key": "versionId", "type": "str"},
        "dataflow": {"key": "dataflow", "type": "str"},
        "created_time": {"key": "createdTime", "type": "iso-8601"},
        "modified_time": {"key": "modifiedTime", "type": "iso-8601"},
        "created_by": {"key": "createdBy", "type": "User"},
        "modified_by": {"key": "modifiedBy", "type": "User"},
        "properties": {"key": "properties", "type": "{str}"},
        "telemetry_info": {"key": "telemetryInfo", "type": "{str}"},
        "description": {"key": "description", "type": "str"},
        "is_anonymous": {"key": "isAnonymous", "type": "bool"},
        "tags": {"key": "tags", "type": "{str}"},
        "legacy_properties": {"key": "legacyProperties", "type": "{object}"},
        "data_expiry_time": {"key": "dataExpiryTime", "type": "iso-8601"},
        "legacy": {"key": "legacy", "type": "{object}"},
    }

    def __init__(
        self, *,
        dataset_id: Optional[str] = None, name: Optional[str] = None,
        version_id: Optional[str] = None, dataflow: Optional[str] = None,
        created_time: Optional[datetime.datetime] = None,
        modified_time: Optional[datetime.datetime] = None,
        created_by: Optional["User"] = None,
        modified_by: Optional["User"] = None,
        properties: Optional[Dict[str, str]] = None,
        telemetry_info: Optional[Dict[str, str]] = None,
        description: Optional[str] = None,
        is_anonymous: Optional[bool] = None,
        tags: Optional[Dict[str, str]] = None,
        legacy_properties: Optional[Dict[str, Any]] = None,
        data_expiry_time: Optional[datetime.datetime] = None,
        legacy: Optional[Dict[str, Any]] = None,
        **kwargs
    ):
        """Store every keyword verbatim on the attribute of the same name."""
        super().__init__(**kwargs)
        # Pass-through storage only; wire mapping lives in _attribute_map.
        self.dataset_id = dataset_id
        self.name = name
        self.version_id = version_id
        self.dataflow = dataflow
        self.created_time = created_time
        self.modified_time = modified_time
        self.created_by = created_by
        self.modified_by = modified_by
        self.properties = properties
        self.telemetry_info = telemetry_info
        self.description = description
        self.is_anonymous = is_anonymous
        self.tags = tags
        self.legacy_properties = legacy_properties
        self.data_expiry_time = data_expiry_time
        self.legacy = legacy
+
+
class DataUriV2Response(msrest.serialization.Model):
    """Response model carrying a data ``uri`` and its ``type`` (both
    optional strings).
    """

    _attribute_map = {
        "uri": {"key": "uri", "type": "str"},
        "type": {"key": "type", "type": "str"},
    }

    def __init__(self, *, uri: Optional[str] = None,
                 type: Optional[str] = None, **kwargs):
        """Store both optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.uri = uri
        self.type = type
+
+
class DataVersion(msrest.serialization.Model):
    """Serialization model for a data version.

    Required (enforced via ``_validation`` when sending to Azure):
    ``data_container_name``, ``data_type``, ``data_uri`` and ``version_id``
    (all strings). Optional: ``asset_id`` (str), ``mutable_props``
    (``DataVersionMutable``), ``referenced_data_uris`` (list[str]) and
    ``properties`` (dict[str, str]).
    """

    _validation = {
        "data_container_name": {"required": True},
        "data_type": {"required": True},
        "data_uri": {"required": True},
        "version_id": {"required": True},
    }

    _attribute_map = {
        "asset_id": {"key": "assetId", "type": "str"},
        "data_container_name": {"key": "dataContainerName", "type": "str"},
        "data_type": {"key": "dataType", "type": "str"},
        "data_uri": {"key": "dataUri", "type": "str"},
        "version_id": {"key": "versionId", "type": "str"},
        "mutable_props": {"key": "mutableProps", "type": "DataVersionMutable"},
        "referenced_data_uris": {"key": "referencedDataUris", "type": "[str]"},
        "properties": {"key": "properties", "type": "{str}"},
    }

    def __init__(
        self, *,
        data_container_name: str, data_type: str, data_uri: str,
        version_id: str,
        asset_id: Optional[str] = None,
        mutable_props: Optional["DataVersionMutable"] = None,
        referenced_data_uris: Optional[List[str]] = None,
        properties: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        """Store the four required and four optional keywords on
        same-named attributes.
        """
        super().__init__(**kwargs)
        self.asset_id = asset_id
        self.data_container_name = data_container_name
        self.data_type = data_type
        self.data_uri = data_uri
        self.version_id = version_id
        self.mutable_props = mutable_props
        self.referenced_data_uris = referenced_data_uris
        self.properties = properties
+
+
class DataVersionEntity(msrest.serialization.Model):
    """Envelope pairing an optional ``DataVersion`` with its optional
    ``EntityMetadata``.
    """

    _attribute_map = {
        "data_version": {"key": "dataVersion", "type": "DataVersion"},
        "entity_metadata": {"key": "entityMetadata", "type": "EntityMetadata"},
    }

    def __init__(self, *, data_version: Optional["DataVersion"] = None,
                 entity_metadata: Optional["EntityMetadata"] = None, **kwargs):
        """Store both optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.data_version = data_version
        self.entity_metadata = entity_metadata
+
+
class DataVersionMutable(msrest.serialization.Model):
    """Mutable fields of a data version.

    Optional attributes: ``data_expiry_time`` (``~datetime.datetime``),
    ``description`` (str), ``tags`` (dict[str, str]) and ``is_archived``
    (bool).
    """

    _attribute_map = {
        "data_expiry_time": {"key": "dataExpiryTime", "type": "iso-8601"},
        "description": {"key": "description", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "is_archived": {"key": "isArchived", "type": "bool"},
    }

    def __init__(self, *, data_expiry_time: Optional[datetime.datetime] = None,
                 description: Optional[str] = None,
                 tags: Optional[Dict[str, str]] = None,
                 is_archived: Optional[bool] = None, **kwargs):
        """Store the optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.data_expiry_time = data_expiry_time
        self.description = description
        self.tags = tags
        self.is_archived = is_archived
+
+
class DataViewSetResult(msrest.serialization.Model):
    """Tabular result: an optional ``schema`` (list of ``ColumnDefinition``)
    plus optional ``rows`` (list of lists of ``DataField``).
    """

    _attribute_map = {
        "schema": {"key": "schema", "type": "[ColumnDefinition]"},
        "rows": {"key": "rows", "type": "[[DataField]]"},
    }

    def __init__(self, *, schema: Optional[List["ColumnDefinition"]] = None,
                 rows: Optional[List[List["DataField"]]] = None, **kwargs):
        """Store both optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.schema = schema
        self.rows = rows
+
+
class EntityMetadata(msrest.serialization.Model):
    """Common entity metadata.

    Optional attributes: ``etag`` (str), ``created_time`` and
    ``modified_time`` (``~datetime.datetime``), ``created_by`` and
    ``modified_by`` (``User``).
    """

    _attribute_map = {
        "etag": {"key": "etag", "type": "str"},
        "created_time": {"key": "createdTime", "type": "iso-8601"},
        "modified_time": {"key": "modifiedTime", "type": "iso-8601"},
        "created_by": {"key": "createdBy", "type": "User"},
        "modified_by": {"key": "modifiedBy", "type": "User"},
    }

    def __init__(self, *, etag: Optional[str] = None,
                 created_time: Optional[datetime.datetime] = None,
                 modified_time: Optional[datetime.datetime] = None,
                 created_by: Optional["User"] = None,
                 modified_by: Optional["User"] = None, **kwargs):
        """Store the optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.etag = etag
        self.created_time = created_time
        self.modified_time = modified_time
        self.created_by = created_by
        self.modified_by = modified_by
+
+
class ErrorAdditionalInfo(msrest.serialization.Model):
    """The resource management error additional info: an optional ``type``
    (str) and an optional free-form ``info`` payload.
    """

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "info": {"key": "info", "type": "object"},
    }

    def __init__(self, *, type: Optional[str] = None,
                 info: Optional[Any] = None, **kwargs):
        """Store both optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.type = type
        self.info = info
+
+
class ErrorResponse(msrest.serialization.Model):
    """The error response.

    Optional attributes: ``error`` (the root ``RootError``), ``correlation``
    (dict[str, str] of correlation details), ``environment`` (hosting
    environment), ``location`` (Azure region), ``time`` (UTC timestamp) and
    ``component_name`` (component where the error originated).
    """

    _attribute_map = {
        "error": {"key": "error", "type": "RootError"},
        "correlation": {"key": "correlation", "type": "{str}"},
        "environment": {"key": "environment", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "time": {"key": "time", "type": "iso-8601"},
        "component_name": {"key": "componentName", "type": "str"},
    }

    def __init__(self, *, error: Optional["RootError"] = None,
                 correlation: Optional[Dict[str, str]] = None,
                 environment: Optional[str] = None,
                 location: Optional[str] = None,
                 time: Optional[datetime.datetime] = None,
                 component_name: Optional[str] = None, **kwargs):
        """Store the optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.error = error
        self.correlation = correlation
        self.environment = environment
        self.location = location
        self.time = time
        self.component_name = component_name
+
+
class HistogramBin(msrest.serialization.Model):
    """One histogram bin: optional ``lower_bound``, ``upper_bound`` and
    ``count`` (all floats).
    """

    _attribute_map = {
        "lower_bound": {"key": "lowerBound", "type": "float"},
        "upper_bound": {"key": "upperBound", "type": "float"},
        "count": {"key": "count", "type": "float"},
    }

    def __init__(self, *, lower_bound: Optional[float] = None,
                 upper_bound: Optional[float] = None,
                 count: Optional[float] = None, **kwargs):
        """Store the optional keywords on same-named attributes."""
        super().__init__(**kwargs)
        self.lower_bound = lower_bound
        self.upper_bound = upper_bound
        self.count = count
+
+
class HttpContent(msrest.serialization.Model):
    """HTTP content model.

    ``headers`` (list of ``KeyValuePairStringIEnumerable1``) is populated
    only by the server and ignored when sending a request, so the
    constructor initializes it to ``None``.
    """

    _validation = {"headers": {"readonly": True}}

    _attribute_map = {
        "headers": {"key": "headers", "type": "[KeyValuePairStringIEnumerable1]"},
    }

    def __init__(self, **kwargs):
        """No settable fields; read-only ``headers`` starts as ``None``."""
        super().__init__(**kwargs)
        self.headers = None
+
+
class HttpMethod(msrest.serialization.Model):
    """HTTP method wrapper with a single optional ``method`` string."""

    _attribute_map = {"method": {"key": "method", "type": "str"}}

    def __init__(self, *, method: Optional[str] = None, **kwargs):
        """Store the optional ``method`` keyword."""
        super().__init__(**kwargs)
        self.method = method
+
+
class HttpRequestMessage(msrest.serialization.Model):
    """HTTP request message model.

    Settable (all optional): ``version`` (str), ``version_policy``
    ("RequestVersionOrLower", "RequestVersionOrHigher" or
    "RequestVersionExact"), ``content`` (``HttpContent``), ``method``
    (``HttpMethod``) and ``request_uri`` (str).

    ``headers`` and ``options`` are server-populated read-only fields and
    are ignored when sending a request; the constructor sets them to
    ``None``.
    """

    _validation = {
        "headers": {"readonly": True},
        "options": {"readonly": True},
    }

    _attribute_map = {
        "version": {"key": "version", "type": "str"},
        "version_policy": {"key": "versionPolicy", "type": "str"},
        "content": {"key": "content", "type": "HttpContent"},
        "method": {"key": "method", "type": "HttpMethod"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "headers": {"key": "headers", "type": "[KeyValuePairStringIEnumerable1]"},
        "options": {"key": "options", "type": "{object}"},
    }

    def __init__(self, *, version: Optional[str] = None,
                 version_policy: Optional[Union[str, "HttpVersionPolicy"]] = None,
                 content: Optional["HttpContent"] = None,
                 method: Optional["HttpMethod"] = None,
                 request_uri: Optional[str] = None, **kwargs):
        """Store the settable keywords; read-only fields start as ``None``."""
        super().__init__(**kwargs)
        self.version = version
        self.version_policy = version_policy
        self.content = content
        self.method = method
        self.request_uri = request_uri
        self.headers = None
        self.options = None
+
+
+class HttpResponseMessage(msrest.serialization.Model):
+    """HttpResponseMessage.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar version:
+    :vartype version: str
+    :ivar content:
+    :vartype content: ~azure.mgmt.machinelearningservices.models.HttpContent
+    :ivar status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing",
+     "EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent",
+     "ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed", "Ambiguous",
+     "Moved", "Redirect", "RedirectMethod", "NotModified", "UseProxy", "Unused",
+     "TemporaryRedirect", "PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired",
+     "Forbidden", "NotFound", "MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired",
+     "RequestTimeout", "Conflict", "Gone", "LengthRequired", "PreconditionFailed",
+     "RequestEntityTooLarge", "RequestUriTooLong", "UnsupportedMediaType",
+     "RequestedRangeNotSatisfiable", "ExpectationFailed", "MisdirectedRequest",
+     "UnprocessableEntity", "Locked", "FailedDependency", "UpgradeRequired", "PreconditionRequired",
+     "TooManyRequests", "RequestHeaderFieldsTooLarge", "UnavailableForLegalReasons",
+     "InternalServerError", "NotImplemented", "BadGateway", "ServiceUnavailable", "GatewayTimeout",
+     "HttpVersionNotSupported", "VariantAlsoNegotiates", "InsufficientStorage", "LoopDetected",
+     "NotExtended", "NetworkAuthenticationRequired".
+    :vartype status_code: str or ~azure.mgmt.machinelearningservices.models.HttpStatusCode
+    :ivar reason_phrase:
+    :vartype reason_phrase: str
+    :ivar headers:
+    :vartype headers:
+     list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringIEnumerable1]
+    :ivar trailing_headers:
+    :vartype trailing_headers:
+     list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringIEnumerable1]
+    :ivar request_message:
+    :vartype request_message: ~azure.mgmt.machinelearningservices.models.HttpRequestMessage
+    :ivar is_success_status_code:
+    :vartype is_success_status_code: bool
+    """
+
+    _validation = {
+        'headers': {'readonly': True},
+        'trailing_headers': {'readonly': True},
+        'is_success_status_code': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'version': {'key': 'version', 'type': 'str'},
+        'content': {'key': 'content', 'type': 'HttpContent'},
+        'status_code': {'key': 'statusCode', 'type': 'str'},
+        'reason_phrase': {'key': 'reasonPhrase', 'type': 'str'},
+        'headers': {'key': 'headers', 'type': '[KeyValuePairStringIEnumerable1]'},
+        'trailing_headers': {'key': 'trailingHeaders', 'type': '[KeyValuePairStringIEnumerable1]'},
+        'request_message': {'key': 'requestMessage', 'type': 'HttpRequestMessage'},
+        'is_success_status_code': {'key': 'isSuccessStatusCode', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        version: Optional[str] = None,
+        content: Optional["HttpContent"] = None,
+        status_code: Optional[Union[str, "HttpStatusCode"]] = None,
+        reason_phrase: Optional[str] = None,
+        request_message: Optional["HttpRequestMessage"] = None,
+        **kwargs
+    ):
+        """
+        :keyword version:
+        :paramtype version: str
+        :keyword content:
+        :paramtype content: ~azure.mgmt.machinelearningservices.models.HttpContent
+        :keyword status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing",
+         "EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent",
+         "ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed", "Ambiguous",
+         "Moved", "Redirect", "RedirectMethod", "NotModified", "UseProxy", "Unused",
+         "TemporaryRedirect", "PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired",
+         "Forbidden", "NotFound", "MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired",
+         "RequestTimeout", "Conflict", "Gone", "LengthRequired", "PreconditionFailed",
+         "RequestEntityTooLarge", "RequestUriTooLong", "UnsupportedMediaType",
+         "RequestedRangeNotSatisfiable", "ExpectationFailed", "MisdirectedRequest",
+         "UnprocessableEntity", "Locked", "FailedDependency", "UpgradeRequired", "PreconditionRequired",
+         "TooManyRequests", "RequestHeaderFieldsTooLarge", "UnavailableForLegalReasons",
+         "InternalServerError", "NotImplemented", "BadGateway", "ServiceUnavailable", "GatewayTimeout",
+         "HttpVersionNotSupported", "VariantAlsoNegotiates", "InsufficientStorage", "LoopDetected",
+         "NotExtended", "NetworkAuthenticationRequired".
+        :paramtype status_code: str or ~azure.mgmt.machinelearningservices.models.HttpStatusCode
+        :keyword reason_phrase:
+        :paramtype reason_phrase: str
+        :keyword request_message:
+        :paramtype request_message: ~azure.mgmt.machinelearningservices.models.HttpRequestMessage
+        """
+        super(HttpResponseMessage, self).__init__(**kwargs)
+        self.version = version
+        self.content = content
+        self.status_code = status_code
+        self.reason_phrase = reason_phrase
+        self.headers = None
+        self.trailing_headers = None
+        self.request_message = request_message
+        self.is_success_status_code = None
+
+
+class InnerErrorResponse(msrest.serialization.Model):
+    """A nested structure of errors.
+
+    :ivar code: The error code.
+    :vartype code: str
+    :ivar inner_error: A nested structure of errors.
+    :vartype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+    """
+
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        *,
+        code: Optional[str] = None,
+        inner_error: Optional["InnerErrorResponse"] = None,
+        **kwargs
+    ):
+        """
+        :keyword code: The error code.
+        :paramtype code: str
+        :keyword inner_error: A nested structure of errors.
+        :paramtype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+        """
+        super(InnerErrorResponse, self).__init__(**kwargs)
+        self.code = code
+        self.inner_error = inner_error
+
+
+class KeyValuePairStringIEnumerable1(msrest.serialization.Model):
+    """KeyValuePairStringIEnumerable1.
+
+    :ivar key:
+    :vartype key: str
+    :ivar value:
+    :vartype value: list[str]
+    """
+
+    _attribute_map = {
+        'key': {'key': 'key', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        key: Optional[str] = None,
+        value: Optional[List[str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword key:
+        :paramtype key: str
+        :keyword value:
+        :paramtype value: list[str]
+        """
+        super(KeyValuePairStringIEnumerable1, self).__init__(**kwargs)
+        self.key = key
+        self.value = value
+
+
+class LongRunningOperationResponse1LongRunningOperationResponseObject(msrest.serialization.Model):
+    """LongRunningOperationResponse1LongRunningOperationResponseObject.
+
+    :ivar completion_result: Anything.
+    :vartype completion_result: any
+    :ivar location:
+    :vartype location: str
+    :ivar operation_result:
+    :vartype operation_result: str
+    """
+
+    _attribute_map = {
+        'completion_result': {'key': 'completionResult', 'type': 'object'},
+        'location': {'key': 'location', 'type': 'str'},
+        'operation_result': {'key': 'operationResult', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        completion_result: Optional[Any] = None,
+        location: Optional[str] = None,
+        operation_result: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword completion_result: Anything.
+        :paramtype completion_result: any
+        :keyword location:
+        :paramtype location: str
+        :keyword operation_result:
+        :paramtype operation_result: str
+        """
+        super(LongRunningOperationResponse1LongRunningOperationResponseObject, self).__init__(**kwargs)
+        self.completion_result = completion_result
+        self.location = location
+        self.operation_result = operation_result
+
+
+class Moments(msrest.serialization.Model):
+    """Moments.
+
+    :ivar mean:
+    :vartype mean: float
+    :ivar standard_deviation:
+    :vartype standard_deviation: float
+    :ivar variance:
+    :vartype variance: float
+    :ivar skewness:
+    :vartype skewness: float
+    :ivar kurtosis:
+    :vartype kurtosis: float
+    """
+
+    _attribute_map = {
+        'mean': {'key': 'mean', 'type': 'float'},
+        'standard_deviation': {'key': 'standardDeviation', 'type': 'float'},
+        'variance': {'key': 'variance', 'type': 'float'},
+        'skewness': {'key': 'skewness', 'type': 'float'},
+        'kurtosis': {'key': 'kurtosis', 'type': 'float'},
+    }
+
+    def __init__(
+        self,
+        *,
+        mean: Optional[float] = None,
+        standard_deviation: Optional[float] = None,
+        variance: Optional[float] = None,
+        skewness: Optional[float] = None,
+        kurtosis: Optional[float] = None,
+        **kwargs
+    ):
+        """
+        :keyword mean:
+        :paramtype mean: float
+        :keyword standard_deviation:
+        :paramtype standard_deviation: float
+        :keyword variance:
+        :paramtype variance: float
+        :keyword skewness:
+        :paramtype skewness: float
+        :keyword kurtosis:
+        :paramtype kurtosis: float
+        """
+        super(Moments, self).__init__(**kwargs)
+        self.mean = mean
+        self.standard_deviation = standard_deviation
+        self.variance = variance
+        self.skewness = skewness
+        self.kurtosis = kurtosis
+
+
+class PaginatedDataContainerEntityList(msrest.serialization.Model):
+    """A paginated list of DataContainerEntitys.
+
+    :ivar value: An array of objects of type DataContainerEntity.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.DataContainerEntity]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[DataContainerEntity]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["DataContainerEntity"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type DataContainerEntity.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataContainerEntity]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedDataContainerEntityList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedDatasetDefinitionList(msrest.serialization.Model):
+    """A paginated list of DatasetDefinitions.
+
+    :ivar value: An array of objects of type DatasetDefinition.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.DatasetDefinition]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[DatasetDefinition]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["DatasetDefinition"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type DatasetDefinition.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.DatasetDefinition]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedDatasetDefinitionList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedDatasetList(msrest.serialization.Model):
+    """A paginated list of Datasets.
+
+    :ivar value: An array of objects of type Dataset.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.Dataset]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Dataset]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["Dataset"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type Dataset.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.Dataset]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedDatasetList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedDatasetV2List(msrest.serialization.Model):
+    """A paginated list of DatasetV2s.
+
+    :ivar value: An array of objects of type DatasetV2.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.DatasetV2]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[DatasetV2]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["DatasetV2"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type DatasetV2.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.DatasetV2]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedDatasetV2List, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedDataVersionEntityList(msrest.serialization.Model):
+    """A paginated list of DataVersionEntitys.
+
+    :ivar value: An array of objects of type DataVersionEntity.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.DataVersionEntity]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[DataVersionEntity]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["DataVersionEntity"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type DataVersionEntity.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataVersionEntity]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedDataVersionEntityList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedStringList(msrest.serialization.Model):
+    """A paginated list of Strings.
+
+    :ivar value: An array of objects of type String.
+    :vartype value: list[str]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[str]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List[str]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type String.
+        :paramtype value: list[str]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedStringList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class ProfileActionResult(msrest.serialization.Model):
+    """ProfileActionResult.
+
+    :ivar profile_action_id:
+    :vartype profile_action_id: str
+    :ivar status:
+    :vartype status: str
+    :ivar completed_on_utc:
+    :vartype completed_on_utc: ~datetime.datetime
+    :ivar action_result:
+    :vartype action_result: ~azure.mgmt.machinelearningservices.models.ActionResult
+    """
+
+    _attribute_map = {
+        'profile_action_id': {'key': 'profileActionId', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'completed_on_utc': {'key': 'completedOnUtc', 'type': 'iso-8601'},
+        'action_result': {'key': 'actionResult', 'type': 'ActionResult'},
+    }
+
+    def __init__(
+        self,
+        *,
+        profile_action_id: Optional[str] = None,
+        status: Optional[str] = None,
+        completed_on_utc: Optional[datetime.datetime] = None,
+        action_result: Optional["ActionResult"] = None,
+        **kwargs
+    ):
+        """
+        :keyword profile_action_id:
+        :paramtype profile_action_id: str
+        :keyword status:
+        :paramtype status: str
+        :keyword completed_on_utc:
+        :paramtype completed_on_utc: ~datetime.datetime
+        :keyword action_result:
+        :paramtype action_result: ~azure.mgmt.machinelearningservices.models.ActionResult
+        """
+        super(ProfileActionResult, self).__init__(**kwargs)
+        self.profile_action_id = profile_action_id
+        self.status = status
+        self.completed_on_utc = completed_on_utc
+        self.action_result = action_result
+
+
+class ProfileResult(msrest.serialization.Model):
+    """ProfileResult.
+
+    :ivar column_name:
+    :vartype column_name: str
+    :ivar type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
+     "Unknown", "Error", "Null", "DataRow", "List", "Stream".
+    :vartype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
+    :ivar min:
+    :vartype min: ~azure.mgmt.machinelearningservices.models.DataField
+    :ivar max:
+    :vartype max: ~azure.mgmt.machinelearningservices.models.DataField
+    :ivar count:
+    :vartype count: long
+    :ivar missing_count:
+    :vartype missing_count: long
+    :ivar not_missing_count:
+    :vartype not_missing_count: long
+    :ivar percent_missing:
+    :vartype percent_missing: float
+    :ivar error_count:
+    :vartype error_count: long
+    :ivar empty_count:
+    :vartype empty_count: long
+    :ivar quantiles:
+    :vartype quantiles: ~azure.mgmt.machinelearningservices.models.Quantiles
+    :ivar whisker_top:
+    :vartype whisker_top: float
+    :ivar whisker_bottom:
+    :vartype whisker_bottom: float
+    :ivar moments:
+    :vartype moments: ~azure.mgmt.machinelearningservices.models.Moments
+    :ivar type_counts:
+    :vartype type_counts: list[~azure.mgmt.machinelearningservices.models.TypeCount]
+    :ivar value_counts:
+    :vartype value_counts: list[~azure.mgmt.machinelearningservices.models.ValueCount]
+    :ivar unique_values:
+    :vartype unique_values: long
+    :ivar histogram:
+    :vartype histogram: list[~azure.mgmt.machinelearningservices.models.HistogramBin]
+    :ivar s_type_counts:
+    :vartype s_type_counts: list[~azure.mgmt.machinelearningservices.models.STypeCount]
+    :ivar average_spaces_count:
+    :vartype average_spaces_count: float
+    :ivar string_lengths:
+    :vartype string_lengths: list[~azure.mgmt.machinelearningservices.models.StringLengthCount]
+    """
+
+    _attribute_map = {
+        'column_name': {'key': 'columnName', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'min': {'key': 'min', 'type': 'DataField'},
+        'max': {'key': 'max', 'type': 'DataField'},
+        'count': {'key': 'count', 'type': 'long'},
+        'missing_count': {'key': 'missingCount', 'type': 'long'},
+        'not_missing_count': {'key': 'notMissingCount', 'type': 'long'},
+        'percent_missing': {'key': 'percentMissing', 'type': 'float'},
+        'error_count': {'key': 'errorCount', 'type': 'long'},
+        'empty_count': {'key': 'emptyCount', 'type': 'long'},
+        'quantiles': {'key': 'quantiles', 'type': 'Quantiles'},
+        'whisker_top': {'key': 'whiskerTop', 'type': 'float'},
+        'whisker_bottom': {'key': 'whiskerBottom', 'type': 'float'},
+        'moments': {'key': 'moments', 'type': 'Moments'},
+        'type_counts': {'key': 'typeCounts', 'type': '[TypeCount]'},
+        'value_counts': {'key': 'valueCounts', 'type': '[ValueCount]'},
+        'unique_values': {'key': 'uniqueValues', 'type': 'long'},
+        'histogram': {'key': 'histogram', 'type': '[HistogramBin]'},
+        's_type_counts': {'key': 'sTypeCounts', 'type': '[STypeCount]'},
+        'average_spaces_count': {'key': 'averageSpacesCount', 'type': 'float'},
+        'string_lengths': {'key': 'stringLengths', 'type': '[StringLengthCount]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        column_name: Optional[str] = None,
+        type: Optional[Union[str, "FieldType"]] = None,
+        min: Optional["DataField"] = None,
+        max: Optional["DataField"] = None,
+        count: Optional[int] = None,
+        missing_count: Optional[int] = None,
+        not_missing_count: Optional[int] = None,
+        percent_missing: Optional[float] = None,
+        error_count: Optional[int] = None,
+        empty_count: Optional[int] = None,
+        quantiles: Optional["Quantiles"] = None,
+        whisker_top: Optional[float] = None,
+        whisker_bottom: Optional[float] = None,
+        moments: Optional["Moments"] = None,
+        type_counts: Optional[List["TypeCount"]] = None,
+        value_counts: Optional[List["ValueCount"]] = None,
+        unique_values: Optional[int] = None,
+        histogram: Optional[List["HistogramBin"]] = None,
+        s_type_counts: Optional[List["STypeCount"]] = None,
+        average_spaces_count: Optional[float] = None,
+        string_lengths: Optional[List["StringLengthCount"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword column_name:
+        :paramtype column_name: str
+        :keyword type: Possible values include: "String", "Boolean", "Integer", "Decimal", "Date",
+         "Unknown", "Error", "Null", "DataRow", "List", "Stream".
+        :paramtype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
+        :keyword min:
+        :paramtype min: ~azure.mgmt.machinelearningservices.models.DataField
+        :keyword max:
+        :paramtype max: ~azure.mgmt.machinelearningservices.models.DataField
+        :keyword count:
+        :paramtype count: long
+        :keyword missing_count:
+        :paramtype missing_count: long
+        :keyword not_missing_count:
+        :paramtype not_missing_count: long
+        :keyword percent_missing:
+        :paramtype percent_missing: float
+        :keyword error_count:
+        :paramtype error_count: long
+        :keyword empty_count:
+        :paramtype empty_count: long
+        :keyword quantiles:
+        :paramtype quantiles: ~azure.mgmt.machinelearningservices.models.Quantiles
+        :keyword whisker_top:
+        :paramtype whisker_top: float
+        :keyword whisker_bottom:
+        :paramtype whisker_bottom: float
+        :keyword moments:
+        :paramtype moments: ~azure.mgmt.machinelearningservices.models.Moments
+        :keyword type_counts:
+        :paramtype type_counts: list[~azure.mgmt.machinelearningservices.models.TypeCount]
+        :keyword value_counts:
+        :paramtype value_counts: list[~azure.mgmt.machinelearningservices.models.ValueCount]
+        :keyword unique_values:
+        :paramtype unique_values: long
+        :keyword histogram:
+        :paramtype histogram: list[~azure.mgmt.machinelearningservices.models.HistogramBin]
+        :keyword s_type_counts:
+        :paramtype s_type_counts: list[~azure.mgmt.machinelearningservices.models.STypeCount]
+        :keyword average_spaces_count:
+        :paramtype average_spaces_count: float
+        :keyword string_lengths:
+        :paramtype string_lengths: list[~azure.mgmt.machinelearningservices.models.StringLengthCount]
+        """
+        super(ProfileResult, self).__init__(**kwargs)
+        self.column_name = column_name
+        self.type = type
+        self.min = min
+        self.max = max
+        self.count = count
+        self.missing_count = missing_count
+        self.not_missing_count = not_missing_count
+        self.percent_missing = percent_missing
+        self.error_count = error_count
+        self.empty_count = empty_count
+        self.quantiles = quantiles
+        self.whisker_top = whisker_top
+        self.whisker_bottom = whisker_bottom
+        self.moments = moments
+        self.type_counts = type_counts
+        self.value_counts = value_counts
+        self.unique_values = unique_values
+        self.histogram = histogram
+        self.s_type_counts = s_type_counts
+        self.average_spaces_count = average_spaces_count
+        self.string_lengths = string_lengths
+
+
class Quantiles(msrest.serialization.Model):
    """Quantile values for a profiled column, one field per percentile
    (``p0_d1`` through ``p99_d9``).

    :ivar p0_d1:
    :vartype p0_d1: float
    :ivar p1:
    :vartype p1: float
    :ivar p5:
    :vartype p5: float
    :ivar p25:
    :vartype p25: float
    :ivar p50:
    :vartype p50: float
    :ivar p75:
    :vartype p75: float
    :ivar p95:
    :vartype p95: float
    :ivar p99:
    :vartype p99: float
    :ivar p99_d9:
    :vartype p99_d9: float
    """

    _attribute_map = {
        "p0_d1": {"key": "p0D1", "type": "float"},
        "p1": {"key": "p1", "type": "float"},
        "p5": {"key": "p5", "type": "float"},
        "p25": {"key": "p25", "type": "float"},
        "p50": {"key": "p50", "type": "float"},
        "p75": {"key": "p75", "type": "float"},
        "p95": {"key": "p95", "type": "float"},
        "p99": {"key": "p99", "type": "float"},
        "p99_d9": {"key": "p99D9", "type": "float"},
    }

    def __init__(
        self,
        *,
        p0_d1: Optional[float] = None,
        p1: Optional[float] = None,
        p5: Optional[float] = None,
        p25: Optional[float] = None,
        p50: Optional[float] = None,
        p75: Optional[float] = None,
        p95: Optional[float] = None,
        p99: Optional[float] = None,
        p99_d9: Optional[float] = None,
        **kwargs
    ):
        """Each keyword maps one-to-one onto the identically named ivar
        documented on the class; all are optional floats defaulting to None.
        """
        super().__init__(**kwargs)
        self.p0_d1 = p0_d1
        self.p1 = p1
        self.p5 = p5
        self.p25 = p25
        self.p50 = p50
        self.p75 = p75
        self.p95 = p95
        self.p99 = p99
        self.p99_d9 = p99_d9
+
+
class RegisterExistingData(msrest.serialization.Model):
    """Request payload for registering an already-uploaded, unregistered asset
    under a name (and optional version).

    All required parameters must be populated in order to send to Azure.

    :ivar existing_unregistered_asset_id: Required. Id of the existing
     unregistered asset to register.
    :vartype existing_unregistered_asset_id: str
    :ivar name: Required. Name to register the asset under.
    :vartype name: str
    :ivar version: Optional version string for the registration.
    :vartype version: str
    """

    _validation = {
        "existing_unregistered_asset_id": {"required": True},
        "name": {"required": True},
    }

    _attribute_map = {
        "existing_unregistered_asset_id": {"key": "existingUnregisteredAssetId", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "version": {"key": "version", "type": "str"},
    }

    def __init__(self, *, existing_unregistered_asset_id: str, name: str, version: Optional[str] = None, **kwargs):
        """Keywords map one-to-one onto the ivars documented on the class;
        ``existing_unregistered_asset_id`` and ``name`` are required.
        """
        super().__init__(**kwargs)
        self.existing_unregistered_asset_id = existing_unregistered_asset_id
        self.name = name
        self.version = version
+
+
class RootError(msrest.serialization.Model):
    """Top-level error payload returned by the service.

    :ivar code: Service-defined error code. Supported error codes:
     ServiceError, UserError, ValidationError, AzureStorageError,
     TransientError, RequestThrottled.
    :vartype code: str
    :ivar severity: Severity of the error.
    :vartype severity: int
    :ivar message: Human-readable description of the error.
    :vartype message: str
    :ivar message_format: Unformatted version of the message, with no variable
     substitution applied.
    :vartype message_format: str
    :ivar message_parameters: Substitution values corresponding to the
     placeholders in MessageFormat.
    :vartype message_parameters: dict[str, str]
    :ivar reference_code: Optionally set by the system generating the error;
     classifies the problem and identifies the module/code area where the
     failure occurred.
    :vartype reference_code: str
    :ivar details_uri: URI pointing to more details about the context of the
     error.
    :vartype details_uri: str
    :ivar target: Target of the error (e.g. the name of the property in
     error).
    :vartype target: str
    :ivar details: Related errors that occurred during the request.
    :vartype details: list[~azure.mgmt.machinelearningservices.models.RootError]
    :ivar inner_error: Nested structure of errors.
    :vartype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
    :ivar additional_info: Additional error info entries.
    :vartype additional_info: list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo]
    """

    _attribute_map = {
        "code": {"key": "code", "type": "str"},
        "severity": {"key": "severity", "type": "int"},
        "message": {"key": "message", "type": "str"},
        "message_format": {"key": "messageFormat", "type": "str"},
        "message_parameters": {"key": "messageParameters", "type": "{str}"},
        "reference_code": {"key": "referenceCode", "type": "str"},
        "details_uri": {"key": "detailsUri", "type": "str"},
        "target": {"key": "target", "type": "str"},
        "details": {"key": "details", "type": "[RootError]"},
        "inner_error": {"key": "innerError", "type": "InnerErrorResponse"},
        "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"},
    }

    def __init__(
        self,
        *,
        code: Optional[str] = None,
        severity: Optional[int] = None,
        message: Optional[str] = None,
        message_format: Optional[str] = None,
        message_parameters: Optional[Dict[str, str]] = None,
        reference_code: Optional[str] = None,
        details_uri: Optional[str] = None,
        target: Optional[str] = None,
        details: Optional[List["RootError"]] = None,
        inner_error: Optional["InnerErrorResponse"] = None,
        additional_info: Optional[List["ErrorAdditionalInfo"]] = None,
        **kwargs
    ):
        """All keywords are optional and map one-to-one onto the identically
        named ivars documented on the class.
        """
        super().__init__(**kwargs)
        self.code = code
        self.severity = severity
        self.message = message
        self.message_format = message_format
        self.message_parameters = message_parameters
        self.reference_code = reference_code
        self.details_uri = details_uri
        self.target = target
        self.details = details
        self.inner_error = inner_error
        self.additional_info = additional_info
+
+
class SqlDataPath(msrest.serialization.Model):
    """Location of SQL-backed data: a table, a query, or a stored procedure
    with its parameters.

    :ivar sql_table_name: Name of the SQL table.
    :vartype sql_table_name: str
    :ivar sql_query: SQL query text.
    :vartype sql_query: str
    :ivar sql_stored_procedure_name: Name of the stored procedure.
    :vartype sql_stored_procedure_name: str
    :ivar sql_stored_procedure_params: Parameters for the stored procedure.
    :vartype sql_stored_procedure_params:
     list[~azure.mgmt.machinelearningservices.models.StoredProcedureParameter]
    :ivar query_timeout: Query timeout value.
    :vartype query_timeout: long
    """

    _attribute_map = {
        "sql_table_name": {"key": "sqlTableName", "type": "str"},
        "sql_query": {"key": "sqlQuery", "type": "str"},
        "sql_stored_procedure_name": {"key": "sqlStoredProcedureName", "type": "str"},
        "sql_stored_procedure_params": {"key": "sqlStoredProcedureParams", "type": "[StoredProcedureParameter]"},
        "query_timeout": {"key": "queryTimeout", "type": "long"},
    }

    def __init__(
        self,
        *,
        sql_table_name: Optional[str] = None,
        sql_query: Optional[str] = None,
        sql_stored_procedure_name: Optional[str] = None,
        sql_stored_procedure_params: Optional[List["StoredProcedureParameter"]] = None,
        query_timeout: Optional[int] = None,
        **kwargs
    ):
        """All keywords are optional and map one-to-one onto the identically
        named ivars documented on the class.
        """
        super().__init__(**kwargs)
        self.sql_table_name = sql_table_name
        self.sql_query = sql_query
        self.sql_stored_procedure_name = sql_stored_procedure_name
        self.sql_stored_procedure_params = sql_stored_procedure_params
        self.query_timeout = query_timeout
+
+
class StoredProcedureParameter(msrest.serialization.Model):
    """A single named, typed parameter passed to a SQL stored procedure.

    :ivar name: Parameter name.
    :vartype name: str
    :ivar value: Parameter value as a string.
    :vartype value: str
    :ivar type: Parameter type. Possible values include: "String", "Int",
     "Decimal", "Guid", "Boolean", "Date".
    :vartype type: str or ~azure.mgmt.machinelearningservices.models.StoredProcedureParameterType
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "value": {"key": "value", "type": "str"},
        "type": {"key": "type", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        value: Optional[str] = None,
        type: Optional[Union[str, "StoredProcedureParameterType"]] = None,
        **kwargs
    ):
        """All keywords are optional and map one-to-one onto the identically
        named ivars documented on the class.
        """
        super().__init__(**kwargs)
        self.name = name
        self.value = value
        self.type = type
+
+
class StringLengthCount(msrest.serialization.Model):
    """Count of values observed at a given string length.

    :ivar length: The string length bucket.
    :vartype length: long
    :ivar count: Number of values with that length.
    :vartype count: long
    """

    _attribute_map = {
        "length": {"key": "length", "type": "long"},
        "count": {"key": "count", "type": "long"},
    }

    def __init__(self, *, length: Optional[int] = None, count: Optional[int] = None, **kwargs):
        """Both keywords are optional and map one-to-one onto the identically
        named ivars documented on the class.
        """
        super().__init__(**kwargs)
        self.length = length
        self.count = count
+
+
class STypeCount(msrest.serialization.Model):
    """Count of values matching a particular semantic type (SType).

    :ivar s_type: The semantic type. Possible values include: "EmailAddress",
     "GeographicCoordinate", "Ipv4Address", "Ipv6Address", "UsPhoneNumber",
     "ZipCode".
    :vartype s_type: str or ~azure.mgmt.machinelearningservices.models.SType
    :ivar count: Number of values of that semantic type.
    :vartype count: long
    """

    _attribute_map = {
        "s_type": {"key": "sType", "type": "str"},
        "count": {"key": "count", "type": "long"},
    }

    def __init__(self, *, s_type: Optional[Union[str, "SType"]] = None, count: Optional[int] = None, **kwargs):
        """Both keywords are optional and map one-to-one onto the identically
        named ivars documented on the class.
        """
        super().__init__(**kwargs)
        self.s_type = s_type
        self.count = count
+
+
class TypeCount(msrest.serialization.Model):
    """Count of values of a particular field type.

    :ivar type: The field type. Possible values include: "String", "Boolean",
     "Integer", "Decimal", "Date", "Unknown", "Error", "Null", "DataRow",
     "List", "Stream".
    :vartype type: str or ~azure.mgmt.machinelearningservices.models.FieldType
    :ivar count: Number of values of that type.
    :vartype count: long
    """

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "count": {"key": "count", "type": "long"},
    }

    def __init__(self, *, type: Optional[Union[str, "FieldType"]] = None, count: Optional[int] = None, **kwargs):
        """Both keywords are optional and map one-to-one onto the identically
        named ivars documented on the class.
        """
        super().__init__(**kwargs)
        self.type = type
        self.count = count
+
+
class User(msrest.serialization.Model):
    """Identity of a user or service principal.

    Several fields are PII/EUPI — see the per-field notes for what may be
    logged.

    :ivar user_object_id: Object ID of the user or service principal.
     This is EUPI and may only be logged to warm path telemetry.
    :vartype user_object_id: str
    :ivar user_pu_id: PuID of the user or service principal.
     This is PII and should never be logged.
    :vartype user_pu_id: str
    :ivar user_idp: Identity provider of the user, e.g. live.com.
     This is PII and should never be logged.
    :vartype user_idp: str
    :ivar user_alt_sec_id: Alternate sec id representing the user in a
     different identity provider system, e.g. 1:live.com:puid.
     This is PII and should never be logged.
    :vartype user_alt_sec_id: str
    :ivar user_iss: Issuer of the token for this user.
     This is PII and should never be logged.
    :vartype user_iss: str
    :ivar user_tenant_id: Tenant ID of the user or service principal.
    :vartype user_tenant_id: str
    :ivar user_name: Full name of the user, or the app ID of a service
     principal. This is PII and should never be logged.
    :vartype user_name: str
    :ivar upn: The user's principal name (UPN).
     This is PII and should never be logged.
    :vartype upn: str
    """

    _attribute_map = {
        "user_object_id": {"key": "userObjectId", "type": "str"},
        "user_pu_id": {"key": "userPuId", "type": "str"},
        "user_idp": {"key": "userIdp", "type": "str"},
        "user_alt_sec_id": {"key": "userAltSecId", "type": "str"},
        "user_iss": {"key": "userIss", "type": "str"},
        "user_tenant_id": {"key": "userTenantId", "type": "str"},
        "user_name": {"key": "userName", "type": "str"},
        "upn": {"key": "upn", "type": "str"},
    }

    def __init__(
        self,
        *,
        user_object_id: Optional[str] = None,
        user_pu_id: Optional[str] = None,
        user_idp: Optional[str] = None,
        user_alt_sec_id: Optional[str] = None,
        user_iss: Optional[str] = None,
        user_tenant_id: Optional[str] = None,
        user_name: Optional[str] = None,
        upn: Optional[str] = None,
        **kwargs
    ):
        """All keywords are optional and map one-to-one onto the identically
        named ivars documented on the class (including their PII caveats).
        """
        super().__init__(**kwargs)
        self.user_object_id = user_object_id
        self.user_pu_id = user_pu_id
        self.user_idp = user_idp
        self.user_alt_sec_id = user_alt_sec_id
        self.user_iss = user_iss
        self.user_tenant_id = user_tenant_id
        self.user_name = user_name
        self.upn = upn
+
+
class ValueCount(msrest.serialization.Model):
    """Count of occurrences of a particular data value.

    :ivar value: The observed value.
    :vartype value: ~azure.mgmt.machinelearningservices.models.DataField
    :ivar count: Number of occurrences of that value.
    :vartype count: long
    """

    _attribute_map = {
        "value": {"key": "value", "type": "DataField"},
        "count": {"key": "count", "type": "long"},
    }

    def __init__(self, *, value: Optional["DataField"] = None, count: Optional[int] = None, **kwargs):
        """Both keywords are optional and map one-to-one onto the identically
        named ivars documented on the class.
        """
        super().__init__(**kwargs)
        self.value = value
        self.count = count
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/__init__.py
new file mode 100644
index 00000000..f0340813
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/__init__.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._data_call_operations import DataCallOperations
+from ._data_container_operations import DataContainerOperations
+from ._delete_operations import DeleteOperations
+from ._datasets_v1_operations import DatasetsV1Operations
+from ._dataset_controller_v2_operations import DatasetControllerV2Operations
+from ._dataset_v2_operations import DatasetV2Operations
+from ._data_version_operations import DataVersionOperations
+from ._get_operation_status_operations import GetOperationStatusOperations
+
# Public surface of the operations package: one operation-group class per
# generated operations module, re-exported here for the service client.
__all__ = [
    'DataCallOperations',
    'DataContainerOperations',
    'DeleteOperations',
    'DatasetsV1Operations',
    'DatasetControllerV2Operations',
    'DatasetV2Operations',
    'DataVersionOperations',
    'GetOperationStatusOperations',
]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_call_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_call_operations.py
new file mode 100644
index 00000000..4e7865d1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_call_operations.py
@@ -0,0 +1,356 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from msrest import Serializer
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
# Module-level serializer shared by the request builders below; client-side
# validation is disabled so malformed inputs surface as service errors.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
+# fmt: off
+
def build_get_schema_for_ml_table_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the POST request for the datacall ``schema`` endpoint.

    The three path parameters are serialized into the URL template; an
    ``Accept: application/json`` header is always sent, and ``Content-Type``
    only when a ``content_type`` keyword is supplied. Remaining kwargs are
    forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Resolve the URL: allow an override via template_url, then substitute
    # the serialized path parameters.
    template = kwargs.pop("template_url", '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/schema')
    url = _format_url_section(
        template,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="POST", url=url, headers=headers, **kwargs)
+
+
def build_get_preview_for_ml_table_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the POST request for the datacall ``preview`` endpoint.

    The three path parameters are serialized into the URL template; an
    ``Accept: application/json`` header is always sent, and ``Content-Type``
    only when a ``content_type`` keyword is supplied. Remaining kwargs are
    forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Resolve the URL: allow an override via template_url, then substitute
    # the serialized path parameters.
    template = kwargs.pop("template_url", '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/preview')
    url = _format_url_section(
        template,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="POST", url=url, headers=headers, **kwargs)
+
+
def build_get_quick_profile_for_ml_table_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the POST request for the datacall ``quickprofile`` endpoint.

    The three path parameters are serialized into the URL template; an
    ``Accept: application/json`` header is always sent, and ``Content-Type``
    only when a ``content_type`` keyword is supplied. Remaining kwargs are
    forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Resolve the URL: allow an override via template_url, then substitute
    # the serialized path parameters.
    template = kwargs.pop("template_url", '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/quickprofile')
    url = _format_url_section(
        template,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="POST", url=url, headers=headers, **kwargs)
+
+# fmt: on
+class DataCallOperations(object):
+    """DataCallOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def get_schema_for_ml_table(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.DataCallRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> List["_models.ColumnDefinition"]
+        """Get schema for a specific MLTable.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataCallRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: list of ColumnDefinition, or the result of cls(response)
+        :rtype: list[~azure.mgmt.machinelearningservices.models.ColumnDefinition]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[List["_models.ColumnDefinition"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DataCallRequest')
+        else:
+            _json = None
+
+        request = build_get_schema_for_ml_table_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_schema_for_ml_table.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('[ColumnDefinition]', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_schema_for_ml_table.metadata = {'url': '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/schema'}  # type: ignore
+
+
+    @distributed_trace
+    def get_preview_for_ml_table(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.DataCallRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DataViewSetResult"
+        """Get preview for a specific MLTable.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataCallRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataViewSetResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataViewSetResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataViewSetResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DataCallRequest')
+        else:
+            _json = None
+
+        request = build_get_preview_for_ml_table_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_preview_for_ml_table.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataViewSetResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_preview_for_ml_table.metadata = {'url': '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/preview'}  # type: ignore
+
+
+    @distributed_trace
+    def get_quick_profile_for_ml_table(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.DataCallRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> List["_models.ProfileResult"]
+        """Get quick profile for a specific MLTable.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DataCallRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: list of ProfileResult, or the result of cls(response)
+        :rtype: list[~azure.mgmt.machinelearningservices.models.ProfileResult]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[List["_models.ProfileResult"]]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DataCallRequest')
+        else:
+            _json = None
+
+        request = build_get_quick_profile_for_ml_table_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_quick_profile_for_ml_table.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('[ProfileResult]', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_quick_profile_for_ml_table.metadata = {'url': '/data/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacall/quickprofile'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_container_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_container_operations.py
new file mode 100644
index 00000000..ac3af23d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_container_operations.py
@@ -0,0 +1,464 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from msrest import Serializer
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
def build_create_data_container_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the POST request that creates a data container in a workspace."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Resolve the URL template against the supplied scope identifiers.
    _url = kwargs.pop(
        "template_url",
        '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer',
    )
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    # Content-Type is only emitted when a body content type was supplied.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs)
+
+
def build_list_data_container_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request that lists the data containers of a workspace."""
    # Resolve the URL template against the supplied scope identifiers.
    _url = kwargs.pop(
        "template_url",
        '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer',
    )
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs)
+
+
def build_get_data_container_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request that fetches a single data container by name."""
    # Resolve the URL template against the supplied scope identifiers.
    _url = kwargs.pop(
        "template_url",
        '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}',
    )
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs)
+
+
def build_modify_data_container_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the PATCH request that updates mutable fields of a data container."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Resolve the URL template against the supplied scope identifiers.
    _url = kwargs.pop(
        "template_url",
        '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}',
    )
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
    )

    # Content-Type is only emitted when a body content type was supplied.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="PATCH", url=_url, headers=_headers, **kwargs)
+
+# fmt: on
class DataContainerOperations(object):
    """DataContainerOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.machinelearningservices.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client used to send requests and format URLs.
        self._client = client
        # msrest serializer/deserializer pair for model <-> wire translation.
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace
    def create_data_container(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body=None,  # type: Optional["_models.DataContainer"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DataContainerEntity"
        """Create a data container in the given workspace.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param body: The data container definition to create; sent as JSON when provided.
        :type body: ~azure.mgmt.machinelearningservices.models.DataContainer
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataContainerEntity, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
        # Well-known failure statuses mapped to azure-core exceptions; callers
        # may extend/override the mapping via the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # A missing body is sent with no JSON payload at all.
        if body is not None:
            _json = self._serialize.body(body, 'DataContainer')
        else:
            _json = None

        request = build_create_data_container_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            content_type=content_type,
            json=_json,
            template_url=self.create_data_container.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Non-200 responses raise, carrying the service's ErrorResponse if present.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataContainerEntity', pipeline_response)

        # ``cls`` lets callers post-process the raw pipeline response.
        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create_data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer'}  # type: ignore


    @distributed_trace
    def list_data_container(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.PaginatedDataContainerEntityList"]
        """List the data containers of a workspace as a lazily-fetched, paged iterable.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PaginatedDataContainerEntityList or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDataContainerEntityList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDataContainerEntityList"]
        # Well-known failure statuses mapped to azure-core exceptions; callers
        # may extend/override the mapping via the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the operation's URL template; subsequent pages
            # follow the service-provided ``next_link`` verbatim.
            if not next_link:

                request = build_list_data_container_request(
                    subscription_id=subscription_id,
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    template_url=self.list_data_container.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:

                request = build_list_data_container_request(
                    subscription_id=subscription_id,
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # next_link is fetched with GET regardless of the builder's default.
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and hand ItemPaged the continuation token
            # plus an iterator over the page's elements.
            deserialized = self._deserialize("PaginatedDataContainerEntityList", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page, raising on any non-200 status.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response


        return ItemPaged(
            get_next, extract_data
        )
    list_data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer'}  # type: ignore

    @distributed_trace
    def get_data_container(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DataContainerEntity"
        """Fetch a single data container by name.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name: The name of the data container.
        :type name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataContainerEntity, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
        # Well-known failure statuses mapped to azure-core exceptions; callers
        # may extend/override the mapping via the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))


        request = build_get_data_container_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            template_url=self.get_data_container.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Non-200 responses raise, carrying the service's ErrorResponse if present.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataContainerEntity', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}'}  # type: ignore


    @distributed_trace
    def modify_data_container(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        body=None,  # type: Optional["_models.DataContainerMutable"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DataContainerEntity"
        """Update the mutable fields of an existing data container (HTTP PATCH).

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name: The name of the data container.
        :type name: str
        :param body: The mutable fields to apply; sent as JSON when provided.
        :type body: ~azure.mgmt.machinelearningservices.models.DataContainerMutable
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataContainerEntity, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
        # Well-known failure statuses mapped to azure-core exceptions; callers
        # may extend/override the mapping via the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # A missing body is sent with no JSON payload at all.
        if body is not None:
            _json = self._serialize.body(body, 'DataContainerMutable')
        else:
            _json = None

        request = build_modify_data_container_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            content_type=content_type,
            json=_json,
            template_url=self.modify_data_container.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Non-200 responses raise, carrying the service's ErrorResponse if present.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataContainerEntity', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    modify_data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_version_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_version_operations.py
new file mode 100644
index 00000000..ae87ad4b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_data_version_operations.py
@@ -0,0 +1,1211 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from msrest import Serializer
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
def build_create_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the POST request that creates a new version of a data asset."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Resolve the URL template against the supplied scope identifiers.
    _url = kwargs.pop(
        "template_url",
        '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions',
    )
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
    )

    # Content-Type is only emitted when a body content type was supplied.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs)
+
+
def build_list_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request that lists the versions of a data asset."""
    order_by = kwargs.pop('order_by', None)  # type: Optional[str]
    top = kwargs.pop('top', None)  # type: Optional[int]

    # Resolve the URL template against the supplied scope identifiers.
    _url = kwargs.pop(
        "template_url",
        '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions',
    )
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
    )

    # Optional ordering/paging query parameters are only sent when provided.
    _params = kwargs.pop("params", {})  # type: Dict[str, Any]
    if order_by is not None:
        _params['orderBy'] = _SERIALIZER.query("order_by", order_by, 'str')
    if top is not None:
        _params['top'] = _SERIALIZER.query("top", top, 'int')

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
def build_get_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    version,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request that fetches one version of a data asset."""
    # Resolve the URL template against the supplied scope identifiers.
    _url = kwargs.pop(
        "template_url",
        '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}',
    )
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
        version=_SERIALIZER.url("version", version, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs)
+
+
def build_modify_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    version,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the PATCH request that updates one version of a data asset."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Resolve the URL template against the supplied scope identifiers.
    _url = kwargs.pop(
        "template_url",
        '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}',
    )
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
        version=_SERIALIZER.url("version", version, 'str'),
    )

    # Content-Type is only emitted when a body content type was supplied.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(method="PATCH", url=_url, headers=_headers, **kwargs)
+
+
def build_delete_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    version,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the HTTP DELETE request that removes a single data version.

    Serializes the path parameters into the versioned ``dataversion`` URL
    and sets a JSON ``Accept`` header; remaining ``kwargs`` are forwarded
    to :class:`HttpRequest`.
    """
    accept = "application/json"

    # Resolve the URL template (callers may override it via ``template_url``).
    _url = kwargs.pop("template_url", '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}')
    _path_args = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
        "name": _SERIALIZER.url("name", name, 'str'),
        "version": _SERIALIZER.url("version", version, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="DELETE", url=_url, headers=_headers, **kwargs)
+
+
def build_exists_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    version,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the HTTP GET request for the ``.../versions/{version}/exists`` probe.

    Path parameters are serialized into the URL template and a JSON
    ``Accept`` header is attached; leftover ``kwargs`` flow into
    :class:`HttpRequest`.
    """
    accept = "application/json"

    # Expand the path parameters directly into the URL template.
    _url = kwargs.pop("template_url", '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}/exists')
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
        version=_SERIALIZER.url("version", version, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs)
+
+
def build_get_by_asset_id_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the HTTP POST request for ``dataversion/getByAssetId``.

    An optional ``content_type`` kwarg becomes the ``Content-Type`` header;
    the JSON payload and any other remaining ``kwargs`` are forwarded to
    :class:`HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    accept = "application/json"

    # Expand the workspace-scoped path parameters into the URL template.
    _url = kwargs.pop("template_url", '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/getByAssetId')
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs)
+
+
def build_create_unregistered_input_data_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the HTTP POST request for ``dataversion/createUnregisteredInput``.

    An optional ``content_type`` kwarg becomes the ``Content-Type`` header;
    all other remaining ``kwargs`` (e.g. ``json``) pass through to
    :class:`HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    accept = "application/json"

    # Expand the workspace-scoped path parameters into the URL template.
    _url = kwargs.pop("template_url", '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/createUnregisteredInput')
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs)
+
+
def build_create_unregistered_output_data_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the HTTP POST request for ``dataversion/createUnregisteredOutput``.

    An optional ``content_type`` kwarg becomes the ``Content-Type`` header;
    all other remaining ``kwargs`` (e.g. ``json``) pass through to
    :class:`HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    accept = "application/json"

    # Expand the workspace-scoped path parameters into the URL template.
    _url = kwargs.pop("template_url", '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/createUnregisteredOutput')
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs)
+
+
def build_registered_existing_data_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the HTTP POST request for ``dataversion/registerExisting``.

    An optional ``content_type`` kwarg becomes the ``Content-Type`` header;
    all other remaining ``kwargs`` (e.g. ``json``) pass through to
    :class:`HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    accept = "application/json"

    # Expand the workspace-scoped path parameters into the URL template.
    _url = kwargs.pop("template_url", '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/registerExisting')
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs)
+
+
def build_batch_get_resolved_uris_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the HTTP POST request for ``dataversion/batchGetResolvedUris``.

    An optional ``content_type`` kwarg becomes the ``Content-Type`` header;
    all other remaining ``kwargs`` (e.g. ``json``) pass through to
    :class:`HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    accept = "application/json"

    # Expand the workspace-scoped path parameters into the URL template.
    _url = kwargs.pop("template_url", '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/batchGetResolvedUris')
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs)
+
+# fmt: on
+class DataVersionOperations(object):
+    """DataVersionOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
    @distributed_trace
    def create(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        body=None,  # type: Optional["_models.DataVersion"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DataVersionEntity"
        """Create a new version of the named data asset via POST.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name: Name of the data asset receiving the new version.
        :type name: str
        :param body: Optional definition of the version to create; omitted entirely when ``None``.
        :type body: ~azure.mgmt.machinelearningservices.models.DataVersion
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataVersionEntity, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataVersionEntity"]
        # Map well-known HTTP failure codes to azure-core exception types;
        # callers may extend/override via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # Serialize the optional request body; ``None`` sends no payload.
        if body is not None:
            _json = self._serialize.body(body, 'DataVersion')
        else:
            _json = None

        request = build_create_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            content_type=content_type,
            json=_json,
            template_url=self.create.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is a success; anything else is surfaced as HttpResponseError
        # carrying the service's deserialized ErrorResponse when available.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataVersionEntity', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions'}  # type: ignore
+
+
    @distributed_trace
    def list(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        order_by=None,  # type: Optional[str]
        top=None,  # type: Optional[int]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.PaginatedDataVersionEntityList"]
        """List the versions of the named data asset as a lazily-paged iterator.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name: Name of the data asset whose versions are listed.
        :type name: str
        :param order_by: Optional server-side ordering expression.
        :type order_by: str
        :param top: Optional maximum number of items per page.
        :type top: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PaginatedDataVersionEntityList or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDataVersionEntityList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDataVersionEntityList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the method's URL template; subsequent pages
            # reuse the service-provided next_link as the template.
            if not next_link:

                request = build_list_request(
                    subscription_id=subscription_id,
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    name=name,
                    order_by=order_by,
                    top=top,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:

                request = build_list_request(
                    subscription_id=subscription_id,
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    name=name,
                    order_by=order_by,
                    top=top,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Pull one page out of the response: (next link, items iterator).
            deserialized = self._deserialize("PaginatedDataVersionEntityList", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page, mapping non-200 responses to HttpResponseError.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response


        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions'}  # type: ignore
+
    @distributed_trace
    def get(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        version,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DataVersionEntity"
        """Fetch a single data version by asset name and version string.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name: Name of the data asset.
        :type name: str
        :param version: Version identifier to retrieve.
        :type version: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataVersionEntity, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataVersionEntity"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))


        request = build_get_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            version=version,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is treated as success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataVersionEntity', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}'}  # type: ignore
+
+
    @distributed_trace
    def modify(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        version,  # type: str
        body=None,  # type: Optional["_models.DataVersionMutable"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DataVersionEntity"
        """Patch the mutable fields of an existing data version.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name: Name of the data asset.
        :type name: str
        :param version: Version identifier to modify.
        :type version: str
        :param body: Optional mutable fields to apply; omitted entirely when ``None``.
        :type body: ~azure.mgmt.machinelearningservices.models.DataVersionMutable
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataVersionEntity, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataVersionEntity"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # Serialize the optional request body; ``None`` sends no payload.
        if body is not None:
            _json = self._serialize.body(body, 'DataVersionMutable')
        else:
            _json = None

        request = build_modify_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            version=version,
            content_type=content_type,
            json=_json,
            template_url=self.modify.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is treated as success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataVersionEntity', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    modify.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}'}  # type: ignore
+
+
    @distributed_trace
    def delete(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        version,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.HttpResponseMessage"
        """Delete a single data version.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name: Name of the data asset.
        :type name: str
        :param version: Version identifier to delete.
        :type version: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: HttpResponseMessage, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.HttpResponseMessage
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HttpResponseMessage"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))


        request = build_delete_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            version=version,
            template_url=self.delete.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is treated as success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('HttpResponseMessage', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    delete.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}'}  # type: ignore
+
+
    @distributed_trace
    def exists(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        version,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> bool
        """Check whether the given data version exists.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name: Name of the data asset.
        :type name: str
        :param version: Version identifier to probe.
        :type version: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: bool, or the result of cls(response)
        :rtype: bool
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[bool]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))


        request = build_exists_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            version=version,
            template_url=self.exists.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is treated as success; the payload is a bare JSON boolean.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('bool', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    exists.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/{name}/versions/{version}/exists'}  # type: ignore
+
+
    @distributed_trace
    def get_by_asset_id(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        body=None,  # type: Optional["_models.AssetId"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.DataVersionEntity"
        """Resolve a data version from an asset ID supplied in the request body.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param body: Optional asset-ID payload; omitted entirely when ``None``.
        :type body: ~azure.mgmt.machinelearningservices.models.AssetId
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataVersionEntity, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionEntity
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataVersionEntity"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # Serialize the optional request body; ``None`` sends no payload.
        if body is not None:
            _json = self._serialize.body(body, 'AssetId')
        else:
            _json = None

        request = build_get_by_asset_id_request(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            content_type=content_type,
            json=_json,
            template_url=self.get_by_asset_id.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is treated as success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DataVersionEntity', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_by_asset_id.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/getByAssetId'}  # type: ignore
+
+
+    @distributed_trace
+    def create_unregistered_input_data(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.CreateUnregisteredInputData"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DataContainerEntity"
+        """POST an optional CreateUnregisteredInputData payload to the
+        createUnregisteredInput endpoint and return the deserialized
+        DataContainerEntity.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body: Optional request payload; when None no JSON body is sent.
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateUnregisteredInputData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        # Default status-code -> exception mapping; callers may extend or
+        # override it via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Serialize the model to JSON only when a body was supplied.
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateUnregisteredInputData')
+        else:
+            _json = None
+
+        request = build_create_unregistered_input_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.create_unregistered_input_data.metadata['url'],
+        )
+        # Adapt the rest.HttpRequest for the pipeline and resolve the full URL.
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Only 200 is treated as success; other codes raise HttpResponseError,
+        # attaching a best-effort deserialized ErrorResponse model.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create_unregistered_input_data.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/createUnregisteredInput'}  # type: ignore
+
+
+    @distributed_trace
+    def create_unregistered_output_data(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.CreateUnregisteredOutputData"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DataContainerEntity"
+        """POST an optional CreateUnregisteredOutputData payload to the
+        createUnregisteredOutput endpoint and return the deserialized
+        DataContainerEntity.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body: Optional request payload; when None no JSON body is sent.
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateUnregisteredOutputData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        # Default status-code -> exception mapping; callers may extend or
+        # override it via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Serialize the model to JSON only when a body was supplied.
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateUnregisteredOutputData')
+        else:
+            _json = None
+
+        request = build_create_unregistered_output_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.create_unregistered_output_data.metadata['url'],
+        )
+        # Adapt the rest.HttpRequest for the pipeline and resolve the full URL.
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Only 200 is treated as success; other codes raise HttpResponseError,
+        # attaching a best-effort deserialized ErrorResponse model.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create_unregistered_output_data.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/createUnregisteredOutput'}  # type: ignore
+
+
+    @distributed_trace
+    def registered_existing_data(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.RegisterExistingData"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DataContainerEntity"
+        """POST an optional RegisterExistingData payload to the
+        registerExisting endpoint and return the deserialized
+        DataContainerEntity.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body: Optional request payload; when None no JSON body is sent.
+        :type body: ~azure.mgmt.machinelearningservices.models.RegisterExistingData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DataContainerEntity, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DataContainerEntity
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DataContainerEntity"]
+        # Default status-code -> exception mapping; callers may extend or
+        # override it via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Serialize the model to JSON only when a body was supplied.
+        if body is not None:
+            _json = self._serialize.body(body, 'RegisterExistingData')
+        else:
+            _json = None
+
+        request = build_registered_existing_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.registered_existing_data.metadata['url'],
+        )
+        # Adapt the rest.HttpRequest for the pipeline and resolve the full URL.
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Only 200 is treated as success; other codes raise HttpResponseError,
+        # attaching a best-effort deserialized ErrorResponse model.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DataContainerEntity', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    registered_existing_data.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/registerExisting'}  # type: ignore
+
+
+    @distributed_trace
+    def batch_get_resolved_uris(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.BatchGetResolvedURIs"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchDataUriResponse"
+        """POST an optional BatchGetResolvedURIs payload to the
+        batchGetResolvedUris endpoint and return the deserialized
+        BatchDataUriResponse.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body: Optional request payload; when None no JSON body is sent.
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchGetResolvedURIs
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchDataUriResponse, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchDataUriResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchDataUriResponse"]
+        # Default status-code -> exception mapping; callers may extend or
+        # override it via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # Serialize the model to JSON only when a body was supplied.
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchGetResolvedURIs')
+        else:
+            _json = None
+
+        request = build_batch_get_resolved_uris_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_get_resolved_uris.metadata['url'],
+        )
+        # Adapt the rest.HttpRequest for the pipeline and resolve the full URL.
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Only 200 is treated as success; other codes raise HttpResponseError,
+        # attaching a best-effort deserialized ErrorResponse model.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchDataUriResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_get_resolved_uris.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/dataversion/batchGetResolvedUris'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_dataset_controller_v2_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_dataset_controller_v2_operations.py
new file mode 100644
index 00000000..05d64736
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_dataset_controller_v2_operations.py
@@ -0,0 +1,1300 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from msrest import Serializer
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_get_dataset_definition_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    version,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build (but do not send) the GET request for a single dataset
+    definition version; path parameters are serialized into the URL template.
+    """
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions/{version}')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+        "version": _SERIALIZER.url("version", version, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_get_all_dataset_definitions_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build (but do not send) the GET request listing all definitions of a
+    dataset. Paging controls ('continuation_token_parameter', 'page_size')
+    are popped from kwargs and emitted as query parameters when not None.
+    """
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+    page_size = kwargs.pop('page_size', None)  # type: Optional[int]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if continuation_token_parameter is not None:
+        query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+    if page_size is not None:
+        query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_update_definition_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build (but do not send) the POST request that updates a dataset's
+    definition. Optional flags are popped from kwargs into query parameters;
+    'content_type' is popped into the Content-Type header.
+    """
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    register_as_pending = kwargs.pop('register_as_pending', False)  # type: Optional[bool]
+    force_update = kwargs.pop('force_update', False)  # type: Optional[bool]
+    dataset_type = kwargs.pop('dataset_type', None)  # type: Optional[str]
+    user_version_id = kwargs.pop('user_version_id', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    # NOTE: the boolean flags default to False (not None), so they are always
+    # emitted as query parameters unless a caller explicitly passes None.
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if register_as_pending is not None:
+        query_parameters['registerAsPending'] = _SERIALIZER.query("register_as_pending", register_as_pending, 'bool')
+    if force_update is not None:
+        query_parameters['forceUpdate'] = _SERIALIZER.query("force_update", force_update, 'bool')
+    if dataset_type is not None:
+        query_parameters['datasetType'] = _SERIALIZER.query("dataset_type", dataset_type, 'str')
+    if user_version_id is not None:
+        query_parameters['userVersionId'] = _SERIALIZER.query("user_version_id", user_version_id, 'str')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_get_all_dataset_versions_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build (but do not send) the GET request listing all versions of a
+    dataset. Paging controls ('continuation_token_parameter', 'page_size')
+    are popped from kwargs and emitted as query parameters when not None.
+    """
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+    page_size = kwargs.pop('page_size', None)  # type: Optional[int]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/versions')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if continuation_token_parameter is not None:
+        query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+    if page_size is not None:
+        query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_get_dataset_by_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build (but do not send) the GET request that queries a dataset by
+    name. 'version_id' and 'include_latest_definition' (default True) are
+    popped from kwargs and emitted as query parameters when not None.
+    """
+    version_id = kwargs.pop('version_id', None)  # type: Optional[str]
+    include_latest_definition = kwargs.pop('include_latest_definition', True)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetName": _SERIALIZER.url("dataset_name", dataset_name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if version_id is not None:
+        query_parameters['versionId'] = _SERIALIZER.query("version_id", version_id, 'str')
+    if include_latest_definition is not None:
+        query_parameters['includeLatestDefinition'] = _SERIALIZER.query("include_latest_definition", include_latest_definition, 'bool')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_list_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build (but do not send) the GET request that lists datasets in a
+    workspace. All filters, paging and ordering options are popped from
+    kwargs and emitted as query parameters when not None; list-valued
+    options ('dataset_names', 'dataset_types') are serialized as '[str]'.
+    """
+    dataset_names = kwargs.pop('dataset_names', None)  # type: Optional[List[str]]
+    search_text = kwargs.pop('search_text', None)  # type: Optional[str]
+    include_invisible = kwargs.pop('include_invisible', False)  # type: Optional[bool]
+    status = kwargs.pop('status', None)  # type: Optional[str]
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+    page_size = kwargs.pop('page_size', None)  # type: Optional[int]
+    include_latest_definition = kwargs.pop('include_latest_definition', False)  # type: Optional[bool]
+    order_by = kwargs.pop('order_by', None)  # type: Optional[str]
+    order_by_asc = kwargs.pop('order_by_asc', False)  # type: Optional[bool]
+    dataset_types = kwargs.pop('dataset_types', None)  # type: Optional[List[str]]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    # NOTE: boolean options default to False (not None), so they are always
+    # emitted unless a caller explicitly passes None.
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if dataset_names is not None:
+        query_parameters['datasetNames'] = _SERIALIZER.query("dataset_names", dataset_names, '[str]')
+    if search_text is not None:
+        query_parameters['searchText'] = _SERIALIZER.query("search_text", search_text, 'str')
+    if include_invisible is not None:
+        query_parameters['includeInvisible'] = _SERIALIZER.query("include_invisible", include_invisible, 'bool')
+    if status is not None:
+        query_parameters['status'] = _SERIALIZER.query("status", status, 'str')
+    if continuation_token_parameter is not None:
+        query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+    if page_size is not None:
+        query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int')
+    if include_latest_definition is not None:
+        query_parameters['includeLatestDefinition'] = _SERIALIZER.query("include_latest_definition", include_latest_definition, 'bool')
+    if order_by is not None:
+        query_parameters['orderBy'] = _SERIALIZER.query("order_by", order_by, 'str')
+    if order_by_asc is not None:
+        query_parameters['orderByAsc'] = _SERIALIZER.query("order_by_asc", order_by_asc, 'bool')
+    if dataset_types is not None:
+        query_parameters['datasetTypes'] = _SERIALIZER.query("dataset_types", dataset_types, '[str]')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_register_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build (but do not send) the POST request that registers a dataset.
+    Registration flags are popped from kwargs into query parameters;
+    'content_type' is popped into the Content-Type header.
+    """
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    register_as_pending = kwargs.pop('register_as_pending', False)  # type: Optional[bool]
+    if_exists_ok = kwargs.pop('if_exists_ok', True)  # type: Optional[bool]
+    update_definition_if_exists = kwargs.pop('update_definition_if_exists', False)  # type: Optional[bool]
+    with_data_hash = kwargs.pop('with_data_hash', False)  # type: Optional[bool]
+    user_version_id = kwargs.pop('user_version_id', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    # NOTE: boolean flags default to False/True (not None), so they are
+    # always emitted unless a caller explicitly passes None.
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if register_as_pending is not None:
+        query_parameters['registerAsPending'] = _SERIALIZER.query("register_as_pending", register_as_pending, 'bool')
+    if if_exists_ok is not None:
+        query_parameters['ifExistsOk'] = _SERIALIZER.query("if_exists_ok", if_exists_ok, 'bool')
+    if update_definition_if_exists is not None:
+        query_parameters['updateDefinitionIfExists'] = _SERIALIZER.query("update_definition_if_exists", update_definition_if_exists, 'bool')
+    if with_data_hash is not None:
+        query_parameters['withDataHash'] = _SERIALIZER.query("with_data_hash", with_data_hash, 'bool')
+    if user_version_id is not None:
+        query_parameters['userVersionId'] = _SERIALIZER.query("user_version_id", user_version_id, 'str')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_all_datasets_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build (but do not send) the DELETE request targeting the workspace's
+    datasets collection endpoint.
+    """
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=url,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_update_dataset_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    force_update = kwargs.pop('force_update', False)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if force_update is not None:
+        query_parameters['forceUpdate'] = _SERIALIZER.query("force_update", force_update, 'bool')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PUT",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_unregister_dataset_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "name": _SERIALIZER.url("name", name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=url,
+        headers=header_parameters,
+        **kwargs
+    )
+
+# fmt: on
+class DatasetControllerV2Operations(object):
+    """DatasetControllerV2Operations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client used to send the requests built by this operation group.
+        self._client = client
+        # msrest serializer/deserializer pair shared with the service client.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        # Service client configuration (credential, policies, endpoint).
+        self._config = config
+
+    @distributed_trace
+    def get_dataset_definition(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        version,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DatasetDefinition"
+        """Get a specific dataset definition.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param version:
+        :type version: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetDefinition, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetDefinition"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_dataset_definition_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            version=version,
+            template_url=self.get_dataset_definition.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetDefinition', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_definition.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions/{version}'}  # type: ignore
+
+
+    @distributed_trace
+    def get_all_dataset_definitions(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        continuation_token_parameter=None,  # type: Optional[str]
+        page_size=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedDatasetDefinitionList"]
+        """Get all dataset definitions for a given dataset.
+
+        Returns a lazy pager; no request is sent until iteration begins.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetDefinitionList"]
+        # Default HTTP-status-to-exception mapping; caller-supplied entries win.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            # First page uses the operation's URL template; subsequent pages reuse
+            # the service-provided next link as the template.
+            if not next_link:
+
+                request = build_get_all_dataset_definitions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.get_all_dataset_definitions.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                # NOTE(review): the original query parameters are re-applied to the
+                # next_link request — standard autorest-generated behavior.
+
+                request = build_get_all_dataset_definitions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (next_link, iterator-of-elements).
+            deserialized = self._deserialize("PaginatedDatasetDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                # The cls hook may transform the page's element list.
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Send one page request through the pipeline and map HTTP errors.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_all_dataset_definitions.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions'}  # type: ignore
+
+    @distributed_trace
+    def update_definition(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        register_as_pending=False,  # type: Optional[bool]
+        force_update=False,  # type: Optional[bool]
+        dataset_type=None,  # type: Optional[str]
+        user_version_id=None,  # type: Optional[str]
+        body=None,  # type: Optional["_models.DatasetDefinition"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Dataset"
+        """Update a dataset definition.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param register_as_pending:
+        :type register_as_pending: bool
+        :param force_update:
+        :type force_update: bool
+        :param dataset_type:
+        :type dataset_type: str
+        :param user_version_id:
+        :type user_version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetDefinition')
+        else:
+            _json = None
+
+        request = build_update_definition_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            register_as_pending=register_as_pending,
+            force_update=force_update,
+            dataset_type=dataset_type,
+            user_version_id=user_version_id,
+            template_url=self.update_definition.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_definition.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions'}  # type: ignore
+
+
+    @distributed_trace
+    def get_all_dataset_versions(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        continuation_token_parameter=None,  # type: Optional[str]
+        page_size=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedStringList"]
+        """Get all dataset versions for a given dataset.
+
+        Returns a lazy pager; no request is sent until iteration begins.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedStringList or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedStringList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedStringList"]
+        # Default HTTP-status-to-exception mapping; caller-supplied entries win.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            # First page uses the operation's URL template; subsequent pages reuse
+            # the service-provided next link as the template.
+            if not next_link:
+
+                request = build_get_all_dataset_versions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.get_all_dataset_versions.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                # NOTE(review): the original query parameters are re-applied to the
+                # next_link request — standard autorest-generated behavior.
+
+                request = build_get_all_dataset_versions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (next_link, iterator-of-elements).
+            deserialized = self._deserialize("PaginatedStringList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                # The cls hook may transform the page's element list.
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Send one page request through the pipeline and map HTTP errors.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_all_dataset_versions.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/versions'}  # type: ignore
+
+    @distributed_trace
+    def get_dataset_by_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_name,  # type: str
+        version_id=None,  # type: Optional[str]
+        include_latest_definition=True,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Dataset"
+        """Get a dataset for a given dataset name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_name:
+        :type dataset_name: str
+        :param version_id:
+        :type version_id: str
+        :param include_latest_definition:
+        :type include_latest_definition: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_dataset_by_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_name=dataset_name,
+            version_id=version_id,
+            include_latest_definition=include_latest_definition,
+            template_url=self.get_dataset_by_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_by_name.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}'}  # type: ignore
+
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_names=None,  # type: Optional[List[str]]
+        search_text=None,  # type: Optional[str]
+        include_invisible=False,  # type: Optional[bool]
+        status=None,  # type: Optional[str]
+        continuation_token_parameter=None,  # type: Optional[str]
+        page_size=None,  # type: Optional[int]
+        include_latest_definition=False,  # type: Optional[bool]
+        order_by=None,  # type: Optional[str]
+        order_by_asc=False,  # type: Optional[bool]
+        dataset_types=None,  # type: Optional[List[str]]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedDatasetList"]
+        """Get a list of datasets.
+
+        Returns a lazy pager; no request is sent until iteration begins.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_names:
+        :type dataset_names: list[str]
+        :param search_text:
+        :type search_text: str
+        :param include_invisible:
+        :type include_invisible: bool
+        :param status:
+        :type status: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :param include_latest_definition:
+        :type include_latest_definition: bool
+        :param order_by:
+        :type order_by: str
+        :param order_by_asc:
+        :type order_by_asc: bool
+        :param dataset_types:
+        :type dataset_types: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetList"]
+        # Default HTTP-status-to-exception mapping; caller-supplied entries win.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            # First page uses the operation's URL template; subsequent pages reuse
+            # the service-provided next link as the template.
+            if not next_link:
+
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_names=dataset_names,
+                    search_text=search_text,
+                    include_invisible=include_invisible,
+                    status=status,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    include_latest_definition=include_latest_definition,
+                    order_by=order_by,
+                    order_by_asc=order_by_asc,
+                    dataset_types=dataset_types,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                # NOTE(review): the original query parameters are re-applied to the
+                # next_link request — standard autorest-generated behavior.
+
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_names=dataset_names,
+                    search_text=search_text,
+                    include_invisible=include_invisible,
+                    status=status,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    include_latest_definition=include_latest_definition,
+                    order_by=order_by,
+                    order_by_asc=order_by_asc,
+                    dataset_types=dataset_types,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (next_link, iterator-of-elements).
+            deserialized = self._deserialize("PaginatedDatasetList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                # The cls hook may transform the page's element list.
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Send one page request through the pipeline and map HTTP errors.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+    @distributed_trace
+    def register(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        register_as_pending=False,  # type: Optional[bool]
+        if_exists_ok=True,  # type: Optional[bool]
+        update_definition_if_exists=False,  # type: Optional[bool]
+        with_data_hash=False,  # type: Optional[bool]
+        user_version_id=None,  # type: Optional[str]
+        body=None,  # type: Optional["_models.Dataset"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Dataset"
+        """Register new dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param register_as_pending:
+        :type register_as_pending: bool
+        :param if_exists_ok:
+        :type if_exists_ok: bool
+        :param update_definition_if_exists:
+        :type update_definition_if_exists: bool
+        :param with_data_hash:
+        :type with_data_hash: bool
+        :param user_version_id:
+        :type user_version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Dataset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'Dataset')
+        else:
+            _json = None
+
+        request = build_register_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            register_as_pending=register_as_pending,
+            if_exists_ok=if_exists_ok,
+            update_definition_if_exists=update_definition_if_exists,
+            with_data_hash=with_data_hash,
+            user_version_id=user_version_id,
+            template_url=self.register.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    register.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace
+    def delete_all_datasets(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Unregister all datasets in the workspace.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_all_datasets_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.delete_all_datasets.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete_all_datasets.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace
+    def update_dataset(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        force_update=False,  # type: Optional[bool]
+        body=None,  # type: Optional["_models.Dataset"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Dataset"
+        """Update a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param force_update:
+        :type force_update: bool
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Dataset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'Dataset')
+        else:
+            _json = None
+
+        request = build_update_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            force_update=force_update,
+            template_url=self.update_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_dataset.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'}  # type: ignore
+
+
+    @distributed_trace
+    def unregister_dataset(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Unregister a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_unregister_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            template_url=self.unregister_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    unregister_dataset.metadata = {'url': '/dataset/v1.2/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_dataset_v2_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_dataset_v2_operations.py
new file mode 100644
index 00000000..7f686ab6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_dataset_v2_operations.py
@@ -0,0 +1,905 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from msrest import Serializer
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
# Shared msrest serializer used by the request-builder helpers below to format
# path, query and header parameter values.
_SERIALIZER = Serializer()
# Validation is the service's job; skip client-side model validation.
_SERIALIZER.client_side_validation = False
# fmt: off
+
def build_create_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the POST request used by DatasetV2Operations.create.

    Keyword arguments ``content_type``, ``if_exists_update``, ``template_url``,
    ``params`` and ``headers`` are consumed here; anything left in ``kwargs``
    is forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    if_exists_update = kwargs.pop('if_exists_update', False)  # type: Optional[bool]

    # Expand the path parameters into the template URL.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    # Optional query string.
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    if if_exists_update is not None:
        query_parameters['ifExistsUpdate'] = _SERIALIZER.query("if_exists_update", if_exists_update, 'bool')

    # Headers: Content-Type only when a body content type was given.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(
        method="POST",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
+
+
def build_delete_all_datasets_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the DELETE request that removes every dataset in a workspace."""
    # Expand the path parameters into the template URL.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    # No body and no query string -- only an Accept header is needed.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(
        method="DELETE",
        url=url,
        headers=header_parameters,
        **kwargs
    )
+
+
def build_list_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request used by DatasetV2Operations.list.

    Optional filters (``names``, ``search_text``,
    ``continuation_token_parameter``, ``page_size``) are taken from ``kwargs``
    and, when not ``None``, serialized onto the query string.
    """
    # Expand the path parameters into the template URL.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
    )

    # Serialize each optional filter that was actually supplied.
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    for py_name, wire_name, wire_type in (
        ('names', 'names', '[str]'),
        ('search_text', 'searchText', 'str'),
        ('continuation_token_parameter', 'continuationToken', 'str'),
        ('page_size', 'pageSize', 'int'),
    ):
        value = kwargs.pop(py_name, None)
        if value is not None:
            query_parameters[wire_name] = _SERIALIZER.query(py_name, value, wire_type)

    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
+
+
def build_delete_dataset_by_name_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    version_id,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the DELETE request for one version of a named dataset."""
    # Expand the path parameters into the template URL.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}/versions/{versionId}'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
        versionId=_SERIALIZER.url("version_id", version_id, 'str'),
    )

    # No body and no query string -- only an Accept header is needed.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(
        method="DELETE",
        url=url,
        headers=header_parameters,
        **kwargs
    )
+
+
def build_update_dataset_by_name_and_version_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    version_id,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the PUT request that updates one version of a named dataset."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Expand the path parameters into the template URL.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}/versions/{versionId}'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
        versionId=_SERIALIZER.url("version_id", version_id, 'str'),
    )

    # Headers: Content-Type only when a body content type was given.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(
        method="PUT",
        url=url,
        headers=header_parameters,
        **kwargs
    )
+
+
def build_get_dataset_by_id_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    dataset_id,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request that fetches a dataset by its id."""
    # Expand the path parameters into the template URL.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        datasetId=_SERIALIZER.url("dataset_id", dataset_id, 'str'),
    )

    # No body and no query string -- only an Accept header is needed.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(
        method="GET",
        url=url,
        headers=header_parameters,
        **kwargs
    )
+
+
def build_update_dataset_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    dataset_id,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the PUT request that updates a dataset addressed by id."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    # Expand the path parameters into the template URL.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        datasetId=_SERIALIZER.url("dataset_id", dataset_id, 'str'),
    )

    # Headers: Content-Type only when a body content type was given.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(
        method="PUT",
        url=url,
        headers=header_parameters,
        **kwargs
    )
+
+
def build_get_dataset_by_name_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    dataset_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request that looks a dataset up by name.

    An optional ``version_id`` keyword selects a specific version via the
    query string.
    """
    version_id = kwargs.pop('version_id', None)  # type: Optional[str]

    # Expand the path parameters into the template URL.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        datasetName=_SERIALIZER.url("dataset_name", dataset_name, 'str'),
    )

    # Optional version filter.
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    if version_id is not None:
        query_parameters['versionId'] = _SERIALIZER.query("version_id", version_id, 'str')

    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
+
+# fmt: on
+class DatasetV2Operations(object):
+    """DatasetV2Operations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def create(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        if_exists_update=False,  # type: Optional[bool]
+        body=None,  # type: Optional["_models.DatasetV2"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DatasetV2"
+        """Create new dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param if_exists_update:
+        :type if_exists_update: bool
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetV2')
+        else:
+            _json = None
+
+        request = build_create_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            if_exists_update=if_exists_update,
+            template_url=self.create.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace
+    def delete_all_datasets(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete all datasets in the workspace.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_all_datasets_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.delete_all_datasets.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete_all_datasets.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
    @distributed_trace
    def list(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        names=None,  # type: Optional[List[str]]
        search_text=None,  # type: Optional[str]
        continuation_token_parameter=None,  # type: Optional[str]
        page_size=None,  # type: Optional[int]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.PaginatedDatasetV2List"]
        """Get a list of datasets.

        Returns a lazily-evaluated pager; no request is made until iteration
        starts.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param names: Optional dataset names to filter on.
        :type names: list[str]
        :param search_text: Optional free-text filter.
        :type search_text: str
        :param continuation_token_parameter: Continuation token from a previous page.
        :type continuation_token_parameter: str
        :param page_size: Maximum number of items per page.
        :type page_size: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PaginatedDatasetV2List or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetV2List]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetV2List"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # Build the request for the first page (from the template URL) or
            # for a follow-up page (next_link is already a complete URL).
            if not next_link:

                request = build_list_request(
                    subscription_id=subscription_id,
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    names=names,
                    search_text=search_text,
                    continuation_token_parameter=continuation_token_parameter,
                    page_size=page_size,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:

                request = build_list_request(
                    subscription_id=subscription_id,
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    names=names,
                    search_text=search_text,
                    continuation_token_parameter=continuation_token_parameter,
                    page_size=page_size,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # Continuation pages are always fetched with GET, whatever
                # method the builder chose.
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items).
            deserialized = self._deserialize("PaginatedDatasetV2List", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                # NOTE(review): unlike the non-paged operations, ``cls`` is
                # invoked here per page with only the element list.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page, mapping non-200 responses to ARM-style errors.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response


        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+    @distributed_trace
+    def delete_dataset_by_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        name,  # type: str
+        version_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param version_id:
+        :type version_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_dataset_by_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            version_id=version_id,
+            template_url=self.delete_dataset_by_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete_dataset_by_name.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}/versions/{versionId}'}  # type: ignore
+
+
+    @distributed_trace
+    def update_dataset_by_name_and_version(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        name,  # type: str
+        version_id,  # type: str
+        body=None,  # type: Optional["_models.DatasetV2"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DatasetV2"
+        """Update a dataset by its name and version.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param version_id:
+        :type version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional hook letting the caller transform the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        # Map well-known failure statuses onto typed azure-core exceptions;
+        # callers may extend or override this via the ``error_map`` keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # The request body is optional; serialize it only when supplied.
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetV2')
+        else:
+            _json = None
+
+        request = build_update_dataset_by_name_and_version_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            version_id=version_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.update_dataset_by_name_and_version.metadata['url'],
+        )
+        # Adapt the protocol request and expand the URL against the client's
+        # base endpoint before sending it through the pipeline.
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Anything other than 200 is raised as HttpResponseError with the
+        # service ErrorResponse attached (deserialized best-effort).
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_dataset_by_name_and_version.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}/versions/{versionId}'}  # type: ignore
+
+
+    @distributed_trace
+    def get_dataset_by_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DatasetV2"
+        """Get a dataset for a given dataset id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional hook letting the caller transform the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        # Map well-known failure statuses onto typed azure-core exceptions.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        # Build the GET request from this operation's URL template.
+        request = build_get_dataset_by_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            template_url=self.get_dataset_by_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Anything other than 200 is raised as HttpResponseError with the
+        # service ErrorResponse attached (deserialized best-effort).
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_by_id.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'}  # type: ignore
+
+
+    @distributed_trace
+    def update_dataset(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        body=None,  # type: Optional["_models.DatasetV2"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DatasetV2"
+        """Update a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional hook letting the caller transform the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        # Map well-known failure statuses onto typed azure-core exceptions.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        # The request body is optional; serialize it only when supplied.
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetV2')
+        else:
+            _json = None
+
+        request = build_update_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.update_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Anything other than 200 is raised as HttpResponseError with the
+        # service ErrorResponse attached (deserialized best-effort).
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_dataset.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'}  # type: ignore
+
+
+    @distributed_trace
+    def get_dataset_by_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_name,  # type: str
+        version_id=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DatasetV2"
+        """Get a dataset for a given dataset name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_name:
+        :type dataset_name: str
+        :param version_id:
+        :type version_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional hook letting the caller transform the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetV2"]
+        # Map well-known failure statuses onto typed azure-core exceptions.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        # Build the GET request; ``version_id`` is forwarded as an optional
+        # query parameter by the request builder.
+        request = build_get_dataset_by_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_name=dataset_name,
+            version_id=version_id,
+            template_url=self.get_dataset_by_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Anything other than 200 is raised as HttpResponseError with the
+        # service ErrorResponse attached (deserialized best-effort).
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_by_name.metadata = {'url': '/dataset/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_datasets_v1_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_datasets_v1_operations.py
new file mode 100644
index 00000000..df1b1710
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_datasets_v1_operations.py
@@ -0,0 +1,1300 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from msrest import Serializer
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+# Module-wide serializer used by the request builders below.
+_SERIALIZER = Serializer()
+# Validation is performed by the service, not client-side.
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_get_dataset_definition_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    version,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions/{version}')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+        "version": _SERIALIZER.url("version", version, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_get_all_dataset_definitions_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build the paged GET request listing all definitions of a dataset."""
+    # Optional paging controls, forwarded as query parameters below.
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+    page_size = kwargs.pop('page_size', None)  # type: Optional[int]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if continuation_token_parameter is not None:
+        query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+    if page_size is not None:
+        query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_update_definition_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build the POST request that updates a dataset definition.
+
+    Optional behavior flags (``register_as_pending``, ``force_update``,
+    ``dataset_type``, ``user_version_id``) are consumed from ``kwargs`` and
+    sent as query parameters.
+    """
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    register_as_pending = kwargs.pop('register_as_pending', False)  # type: Optional[bool]
+    force_update = kwargs.pop('force_update', False)  # type: Optional[bool]
+    dataset_type = kwargs.pop('dataset_type', None)  # type: Optional[str]
+    user_version_id = kwargs.pop('user_version_id', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    # NOTE: the boolean flags default to False (not None), so they are always
+    # emitted as query parameters unless explicitly passed as None.
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if register_as_pending is not None:
+        query_parameters['registerAsPending'] = _SERIALIZER.query("register_as_pending", register_as_pending, 'bool')
+    if force_update is not None:
+        query_parameters['forceUpdate'] = _SERIALIZER.query("force_update", force_update, 'bool')
+    if dataset_type is not None:
+        query_parameters['datasetType'] = _SERIALIZER.query("dataset_type", dataset_type, 'str')
+    if user_version_id is not None:
+        query_parameters['userVersionId'] = _SERIALIZER.query("user_version_id", user_version_id, 'str')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_get_all_dataset_versions_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+    page_size = kwargs.pop('page_size', None)  # type: Optional[int]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/versions')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if continuation_token_parameter is not None:
+        query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+    if page_size is not None:
+        query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_get_dataset_by_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build the GET request that looks a dataset up by name.
+
+    ``version_id`` and ``include_latest_definition`` (default True) are
+    consumed from ``kwargs`` and sent as query parameters.
+    """
+    version_id = kwargs.pop('version_id', None)  # type: Optional[str]
+    include_latest_definition = kwargs.pop('include_latest_definition', True)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetName": _SERIALIZER.url("dataset_name", dataset_name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if version_id is not None:
+        query_parameters['versionId'] = _SERIALIZER.query("version_id", version_id, 'str')
+    if include_latest_definition is not None:
+        query_parameters['includeLatestDefinition'] = _SERIALIZER.query("include_latest_definition", include_latest_definition, 'bool')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_list_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build the paged GET request listing datasets in a workspace.
+
+    All filter, paging, and ordering options are consumed from ``kwargs``
+    and emitted as query parameters.
+    """
+    dataset_names = kwargs.pop('dataset_names', None)  # type: Optional[List[str]]
+    search_text = kwargs.pop('search_text', None)  # type: Optional[str]
+    include_invisible = kwargs.pop('include_invisible', False)  # type: Optional[bool]
+    status = kwargs.pop('status', None)  # type: Optional[str]
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+    page_size = kwargs.pop('page_size', None)  # type: Optional[int]
+    include_latest_definition = kwargs.pop('include_latest_definition', False)  # type: Optional[bool]
+    order_by = kwargs.pop('order_by', None)  # type: Optional[str]
+    order_by_asc = kwargs.pop('order_by_asc', False)  # type: Optional[bool]
+    dataset_types = kwargs.pop('dataset_types', None)  # type: Optional[List[str]]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    # NOTE: boolean options default to False (not None) and are therefore
+    # always present on the wire unless explicitly passed as None.
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if dataset_names is not None:
+        query_parameters['datasetNames'] = _SERIALIZER.query("dataset_names", dataset_names, '[str]')
+    if search_text is not None:
+        query_parameters['searchText'] = _SERIALIZER.query("search_text", search_text, 'str')
+    if include_invisible is not None:
+        query_parameters['includeInvisible'] = _SERIALIZER.query("include_invisible", include_invisible, 'bool')
+    if status is not None:
+        query_parameters['status'] = _SERIALIZER.query("status", status, 'str')
+    if continuation_token_parameter is not None:
+        query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+    if page_size is not None:
+        query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int')
+    if include_latest_definition is not None:
+        query_parameters['includeLatestDefinition'] = _SERIALIZER.query("include_latest_definition", include_latest_definition, 'bool')
+    if order_by is not None:
+        query_parameters['orderBy'] = _SERIALIZER.query("order_by", order_by, 'str')
+    if order_by_asc is not None:
+        query_parameters['orderByAsc'] = _SERIALIZER.query("order_by_asc", order_by_asc, 'bool')
+    if dataset_types is not None:
+        query_parameters['datasetTypes'] = _SERIALIZER.query("dataset_types", dataset_types, '[str]')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_register_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    """Build the POST request that registers a dataset in a workspace.
+
+    Registration options (``register_as_pending``, ``if_exists_ok``,
+    ``update_definition_if_exists``, ``with_data_hash``, ``user_version_id``)
+    are consumed from ``kwargs`` and sent as query parameters.
+    """
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    register_as_pending = kwargs.pop('register_as_pending', False)  # type: Optional[bool]
+    if_exists_ok = kwargs.pop('if_exists_ok', True)  # type: Optional[bool]
+    update_definition_if_exists = kwargs.pop('update_definition_if_exists', False)  # type: Optional[bool]
+    with_data_hash = kwargs.pop('with_data_hash', False)  # type: Optional[bool]
+    user_version_id = kwargs.pop('user_version_id', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if register_as_pending is not None:
+        query_parameters['registerAsPending'] = _SERIALIZER.query("register_as_pending", register_as_pending, 'bool')
+    if if_exists_ok is not None:
+        query_parameters['ifExistsOk'] = _SERIALIZER.query("if_exists_ok", if_exists_ok, 'bool')
+    if update_definition_if_exists is not None:
+        query_parameters['updateDefinitionIfExists'] = _SERIALIZER.query("update_definition_if_exists", update_definition_if_exists, 'bool')
+    if with_data_hash is not None:
+        query_parameters['withDataHash'] = _SERIALIZER.query("with_data_hash", with_data_hash, 'bool')
+    if user_version_id is not None:
+        query_parameters['userVersionId'] = _SERIALIZER.query("user_version_id", user_version_id, 'str')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_all_datasets_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=url,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_update_dataset_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    dataset_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    force_update = kwargs.pop('force_update', False)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "datasetId": _SERIALIZER.url("dataset_id", dataset_id, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct parameters
+    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if force_update is not None:
+        query_parameters['forceUpdate'] = _SERIALIZER.query("force_update", force_update, 'bool')
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PUT",
+        url=url,
+        params=query_parameters,
+        headers=header_parameters,
+        **kwargs
+    )
+
+
+def build_unregister_dataset_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    url = kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}')
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "name": _SERIALIZER.url("name", name, 'str'),
+    }
+
+    url = _format_url_section(url, **path_format_arguments)
+
+    # Construct headers
+    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=url,
+        headers=header_parameters,
+        **kwargs
+    )
+
+# fmt: on
+class DatasetsV1Operations(object):
+    """DatasetsV1Operations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def get_dataset_definition(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        version,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DatasetDefinition"
+        """Get a specific dataset definition.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param version:
+        :type version: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DatasetDefinition, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DatasetDefinition"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_dataset_definition_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            version=version,
+            template_url=self.get_dataset_definition.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DatasetDefinition', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_definition.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions/{version}'}  # type: ignore
+
+
+    @distributed_trace
+    def get_all_dataset_definitions(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        continuation_token_parameter=None,  # type: Optional[str]
+        page_size=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedDatasetDefinitionList"]
+        """Get all dataset definitions for a given dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetDefinitionList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_all_dataset_definitions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.get_all_dataset_definitions.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_all_dataset_definitions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedDatasetDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_all_dataset_definitions.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions'}  # type: ignore
+
+    @distributed_trace
+    def update_definition(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        register_as_pending=False,  # type: Optional[bool]
+        force_update=False,  # type: Optional[bool]
+        dataset_type=None,  # type: Optional[str]
+        user_version_id=None,  # type: Optional[str]
+        body=None,  # type: Optional["_models.DatasetDefinition"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Dataset"
+        """Update a dataset definition.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param register_as_pending:
+        :type register_as_pending: bool
+        :param force_update:
+        :type force_update: bool
+        :param dataset_type:
+        :type dataset_type: str
+        :param user_version_id:
+        :type user_version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DatasetDefinition
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DatasetDefinition')
+        else:
+            _json = None
+
+        request = build_update_definition_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            register_as_pending=register_as_pending,
+            force_update=force_update,
+            dataset_type=dataset_type,
+            user_version_id=user_version_id,
+            template_url=self.update_definition.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_definition.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/definitions'}  # type: ignore
+
+
+    @distributed_trace
+    def get_all_dataset_versions(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        continuation_token_parameter=None,  # type: Optional[str]
+        page_size=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedStringList"]
+        """Get all dataset versions for a given dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedStringList or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedStringList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedStringList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_all_dataset_versions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=self.get_all_dataset_versions.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_all_dataset_versions_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_id=dataset_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedStringList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_all_dataset_versions.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}/versions'}  # type: ignore
+
+    @distributed_trace
+    def get_dataset_by_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_name,  # type: str
+        version_id=None,  # type: Optional[str]
+        include_latest_definition=True,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Dataset"
+        """Get a dataset for a given dataset name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_name:
+        :type dataset_name: str
+        :param version_id:
+        :type version_id: str
+        :param include_latest_definition:
+        :type include_latest_definition: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_dataset_by_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_name=dataset_name,
+            version_id=version_id,
+            include_latest_definition=include_latest_definition,
+            template_url=self.get_dataset_by_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_dataset_by_name.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/query/name={datasetName}'}  # type: ignore
+
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_names=None,  # type: Optional[List[str]]
+        search_text=None,  # type: Optional[str]
+        include_invisible=False,  # type: Optional[bool]
+        status=None,  # type: Optional[str]
+        continuation_token_parameter=None,  # type: Optional[str]
+        page_size=None,  # type: Optional[int]
+        include_latest_definition=False,  # type: Optional[bool]
+        order_by=None,  # type: Optional[str]
+        order_by_asc=False,  # type: Optional[bool]
+        dataset_types=None,  # type: Optional[List[str]]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedDatasetList"]
+        """Get a list of datasets.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_names:
+        :type dataset_names: list[str]
+        :param search_text:
+        :type search_text: str
+        :param include_invisible:
+        :type include_invisible: bool
+        :param status:
+        :type status: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :param page_size:
+        :type page_size: int
+        :param include_latest_definition:
+        :type include_latest_definition: bool
+        :param order_by:
+        :type order_by: str
+        :param order_by_asc:
+        :type order_by_asc: bool
+        :param dataset_types:
+        :type dataset_types: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedDatasetList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedDatasetList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedDatasetList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_names=dataset_names,
+                    search_text=search_text,
+                    include_invisible=include_invisible,
+                    status=status,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    include_latest_definition=include_latest_definition,
+                    order_by=order_by,
+                    order_by_asc=order_by_asc,
+                    dataset_types=dataset_types,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    dataset_names=dataset_names,
+                    search_text=search_text,
+                    include_invisible=include_invisible,
+                    status=status,
+                    continuation_token_parameter=continuation_token_parameter,
+                    page_size=page_size,
+                    include_latest_definition=include_latest_definition,
+                    order_by=order_by,
+                    order_by_asc=order_by_asc,
+                    dataset_types=dataset_types,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedDatasetList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+    @distributed_trace
+    def register(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        register_as_pending=False,  # type: Optional[bool]
+        if_exists_ok=True,  # type: Optional[bool]
+        update_definition_if_exists=False,  # type: Optional[bool]
+        with_data_hash=False,  # type: Optional[bool]
+        user_version_id=None,  # type: Optional[str]
+        body=None,  # type: Optional["_models.Dataset"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Dataset"
+        """Register new dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param register_as_pending:
+        :type register_as_pending: bool
+        :param if_exists_ok:
+        :type if_exists_ok: bool
+        :param update_definition_if_exists:
+        :type update_definition_if_exists: bool
+        :param with_data_hash:
+        :type with_data_hash: bool
+        :param user_version_id:
+        :type user_version_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Dataset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'Dataset')
+        else:
+            _json = None
+
+        request = build_register_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            register_as_pending=register_as_pending,
+            if_exists_ok=if_exists_ok,
+            update_definition_if_exists=update_definition_if_exists,
+            with_data_hash=with_data_hash,
+            user_version_id=user_version_id,
+            template_url=self.register.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    register.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace
+    def delete_all_datasets(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Unregister all datasets in the workspace.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_all_datasets_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.delete_all_datasets.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete_all_datasets.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets'}  # type: ignore
+
+
+    @distributed_trace
+    def update_dataset(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        dataset_id,  # type: str
+        force_update=False,  # type: Optional[bool]
+        body=None,  # type: Optional["_models.Dataset"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Dataset"
+        """Update a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param force_update:
+        :type force_update: bool
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Dataset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dataset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Dataset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Dataset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'Dataset')
+        else:
+            _json = None
+
+        request = build_update_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            dataset_id=dataset_id,
+            content_type=content_type,
+            json=_json,
+            force_update=force_update,
+            template_url=self.update_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Dataset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update_dataset.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{datasetId}'}  # type: ignore
+
+
+    @distributed_trace
+    def unregister_dataset(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Unregister a dataset.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_unregister_dataset_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            template_url=self.unregister_dataset.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    unregister_dataset.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datasets/{name}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_delete_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_delete_operations.py
new file mode 100644
index 00000000..e1cd955c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_delete_operations.py
@@ -0,0 +1,145 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from msrest import Serializer
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
def build_data_container_request(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the DELETE request for a data container, without sending it."""
    accept = "application/json"
    # Expand the URL template with the serialized path parameters.
    url = _format_url_section(
        kwargs.pop("template_url", '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        name=_SERIALIZER.url("name", name, 'str'),
    )

    # Construct headers on top of any caller-supplied ones.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="DELETE",
        url=url,
        headers=header_parameters,
        **kwargs
    )
+
+# fmt: on
class DeleteOperations(object):
    """DeleteOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.machinelearningservices.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        # Wiring supplied by the generated service client.
        self._client = client
        self._config = config
        self._serialize = serializer
        self._deserialize = deserializer

    @distributed_trace
    def data_container(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.HttpResponseMessage"
        """data_container.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param name:
        :type name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: HttpResponseMessage, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.HttpResponseMessage
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HttpResponseMessage"]
        # Default HTTP-error mapping; extendable by the caller via ``error_map``.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))

        request = _convert_request(
            build_data_container_request(
                subscription_id=subscription_id,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                name=name,
                template_url=self.data_container.metadata['url'],
            )
        )
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Anything other than HTTP 200 is surfaced as an error.
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('HttpResponseMessage', pipeline_response)
        return cls(pipeline_response, deserialized, {}) if cls else deserialized

    data_container.metadata = {'url': '/data/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datacontainer/{name}'}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_get_operation_status_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_get_operation_status_operations.py
new file mode 100644
index 00000000..085f9749
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/operations/_get_operation_status_operations.py
@@ -0,0 +1,212 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+from msrest import Serializer
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
def build_get_dataset_operation_status_request_initial(
    subscription_id,  # type: str
    resource_group_name,  # type: str
    workspace_name,  # type: str
    operation_id,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for a dataset operation's status, without sending it."""
    accept = "application/json"
    # Expand the URL template with the serialized path parameters.
    url = _format_url_section(
        kwargs.pop("template_url", '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/operations/{operationId}'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        workspaceName=_SERIALIZER.url("workspace_name", workspace_name, 'str'),
        operationId=_SERIALIZER.url("operation_id", operation_id, 'str'),
    )

    # Construct headers on top of any caller-supplied ones.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        headers=header_parameters,
        **kwargs
    )
+
+# fmt: on
class GetOperationStatusOperations(object):
    """GetOperationStatusOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.machinelearningservices.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        # Wiring supplied by the generated service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def _get_dataset_operation_status_initial(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        operation_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Optional["_models.LongRunningOperationResponse1LongRunningOperationResponseObject"]
        """Issue the initial status GET for the long-running operation.

        Returns the deserialized payload on HTTP 200, or ``None`` on HTTP 202
        (operation still in progress; the poll URL arrives in the ``Location``
        response header).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.LongRunningOperationResponse1LongRunningOperationResponseObject"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_dataset_operation_status_request_initial(
            subscription_id=subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            operation_id=operation_id,
            template_url=self._get_dataset_operation_status_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 200 carries the final payload; 202 means the operation is still
        # running, so only the Location header is captured for polling.
        deserialized = None
        response_headers = {}
        if response.status_code == 200:
            deserialized = self._deserialize('LongRunningOperationResponse1LongRunningOperationResponseObject', pipeline_response)

        if response.status_code == 202:
            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))

        if cls:
            return cls(pipeline_response, deserialized, response_headers)

        return deserialized

    _get_dataset_operation_status_initial.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/operations/{operationId}'}  # type: ignore


    @distributed_trace
    def begin_get_dataset_operation_status(
        self,
        subscription_id,  # type: str
        resource_group_name,  # type: str
        workspace_name,  # type: str
        operation_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.LongRunningOperationResponse1LongRunningOperationResponseObject"]
        """get_dataset_operation_status.

        :param subscription_id: The Azure Subscription ID.
        :type subscription_id: str
        :param resource_group_name: The Name of the resource group in which the workspace is located.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param operation_id:
        :type operation_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either
         LongRunningOperationResponse1LongRunningOperationResponseObject or the result of cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LongRunningOperationResponse1LongRunningOperationResponseObject]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LongRunningOperationResponse1LongRunningOperationResponseObject"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only make the initial request when not resuming from a saved token.
        if cont_token is None:
            raw_result = self._get_dataset_operation_status_initial(
                subscription_id=subscription_id,
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                operation_id=operation_id,
                cls=lambda x,y,z: x,  # hand the raw PipelineResponse to the poller
                **kwargs
            )
        # error_map was consumed by the initial call; drop it before polling.
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once polling completes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('LongRunningOperationResponse1LongRunningOperationResponseObject', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # The final result is fetched from the URL in the Location header
        # ('final-state-via': 'location').
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_get_dataset_operation_status.metadata = {'url': '/dataset/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/operations/{operationId}'}  # type: ignore
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/py.typed b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/py.typed
new file mode 100644
index 00000000..e5aff4f8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/dataset_dataplane/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file