Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/__init__.py | 15
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_azure_machine_learning_workspaces.py | 95
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_configuration.py | 60
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_patch.py | 31
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/__init__.py | 19
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_assets_operations.py | 403
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_extensive_model_operations.py | 103
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_migration_operations.py | 99
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_models_operations.py | 875
9 files changed, 1700 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/__init__.py
new file mode 100644
index 00000000..f67ccda9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/__init__.py
@@ -0,0 +1,15 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
+
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+patch_sdk()
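The generated async client above is normally reached indirectly through azure.ai.ml's MLClient rather than constructed by hand, but a minimal direct-use sketch is shown below. The regional endpoint and the use of azure-identity are assumptions for illustration, not taken from this diff.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.ai.ml._restclient.model_dataplane.aio import AzureMachineLearningWorkspaces


async def main() -> None:
    credential = DefaultAzureCredential()
    # base_url is a placeholder regional ML endpoint; the generated default is "".
    async with AzureMachineLearningWorkspaces(
        credential=credential,
        base_url="https://eastus.api.azureml.ms",
    ) as client:
        ...  # client.assets / client.models / client.migration operations go here
    await credential.close()


asyncio.run(main())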
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_azure_machine_learning_workspaces.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000..96732b90
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable, Optional, TYPE_CHECKING
+
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core import AsyncARMPipelineClient
+from msrest import Deserializer, Serializer
+
+from .. import models
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import AssetsOperations, ExtensiveModelOperations, MigrationOperations, ModelsOperations
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+class AzureMachineLearningWorkspaces:
+    """AzureMachineLearningWorkspaces.
+
+    :ivar assets: AssetsOperations operations
+    :vartype assets: azure.mgmt.machinelearningservices.aio.operations.AssetsOperations
+    :ivar extensive_model: ExtensiveModelOperations operations
+    :vartype extensive_model:
+     azure.mgmt.machinelearningservices.aio.operations.ExtensiveModelOperations
+    :ivar migration: MigrationOperations operations
+    :vartype migration: azure.mgmt.machinelearningservices.aio.operations.MigrationOperations
+    :ivar models: ModelsOperations operations
+    :vartype models: azure.mgmt.machinelearningservices.aio.operations.ModelsOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :param base_url: Service URL. Default value is ''.
+    :type base_url: str
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        base_url: str = "",
+        **kwargs: Any
+    ) -> None:
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, **kwargs)
+        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+        self._serialize.client_side_validation = False
+        self.assets = AssetsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.extensive_model = ExtensiveModelOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.migration = MigrationOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.models = ModelsOperations(self._client, self._config, self._serialize, self._deserialize)
+
+
+    def _send_request(
+        self,
+        request: HttpRequest,
+        **kwargs: Any
+    ) -> Awaitable[AsyncHttpResponse]:
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = await client._send_request(request)
+        <AsyncHttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.AsyncHttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, **kwargs)
+
+    async def close(self) -> None:
+        await self._client.close()
+
+    async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details) -> None:
+        await self._client.__aexit__(*exc_details)
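The _send_request docstring above shows the protocol-level flow in isolation; a slightly fuller sketch, assuming an already constructed client and a placeholder relative path, might look like this. Relative URLs are resolved against the client's base_url by format_url, and no service error mapping is applied on this path.

from azure.core.rest import HttpRequest


async def probe(client: AzureMachineLearningWorkspaces) -> int:
    request = HttpRequest("GET", "/modelregistry/v1.0/meta/migration")  # placeholder path
    response = await client._send_request(request)
    response.raise_for_status()  # caller handles errors; _send_request does not
    return response.status_code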
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_configuration.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_configuration.py
new file mode 100644
index 00000000..26def54e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_configuration.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
+
+from .._version import VERSION
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+    """Configuration for AzureMachineLearningWorkspaces.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        **kwargs: Any
+    ) -> None:
+        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+
+        self.credential = credential
+        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+        kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs: Any
+    ) -> None:
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
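Every policy wired up in _configure can be overridden with a keyword argument at client construction time, since **kwargs flows from the client into this configuration. A sketch with an explicit retry policy and credential scopes (the endpoint value is illustrative):

from azure.core.pipeline.policies import AsyncRetryPolicy
from azure.identity.aio import DefaultAzureCredential

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    base_url="https://eastus.api.azureml.ms",  # assumed endpoint
    credential_scopes=["https://management.azure.com/.default"],  # matches the default above
    retry_policy=AsyncRetryPolicy(retry_total=5),
)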
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_patch.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_patch.py
new file mode 100644
index 00000000..74e48ecd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/_patch.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# This file is used for handwritten extensions to the generated code. Example:
+# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+def patch_sdk():
+    pass
\ No newline at end of file
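patch_sdk above ships as a no-op; the linked guide describes putting handwritten additions in _patch.py so they survive regeneration. A purely hypothetical sketch of such a customization (close_quietly is invented for illustration and is not part of the SDK):

def patch_sdk():
    # Hypothetical extension: attach a convenience coroutine to the generated client.
    from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces

    async def close_quietly(self) -> None:
        try:
            await self.close()
        except Exception:
            pass  # illustrative only: ignore shutdown errors

    AzureMachineLearningWorkspaces.close_quietly = close_quietly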
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/__init__.py
new file mode 100644
index 00000000..261577d5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._assets_operations import AssetsOperations
+from ._extensive_model_operations import ExtensiveModelOperations
+from ._migration_operations import MigrationOperations
+from ._models_operations import ModelsOperations
+
+__all__ = [
+    'AssetsOperations',
+    'ExtensiveModelOperations',
+    'MigrationOperations',
+    'ModelsOperations',
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_assets_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_assets_operations.py
new file mode 100644
index 00000000..20f7a4cb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_assets_operations.py
@@ -0,0 +1,403 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._assets_operations import build_create_request, build_delete_request, build_list_request, build_patch_request, build_query_by_id_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class AssetsOperations:
+    """AssetsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def create(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.Asset"] = None,
+        **kwargs: Any
+    ) -> "_models.Asset":
+        """create.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Asset
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Asset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Asset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Asset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'Asset')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'Asset')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_create_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.create.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Asset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/assets'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def list(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: Optional[str] = None,
+        project_id: Optional[str] = None,
+        name: Optional[str] = None,
+        tag: Optional[str] = None,
+        count: Optional[int] = None,
+        skip_token: Optional[str] = None,
+        tags: Optional[str] = None,
+        properties: Optional[str] = None,
+        type: Optional[str] = None,
+        orderby: Optional[Union[str, "_models.OrderString"]] = None,
+        **kwargs: Any
+    ) -> "_models.AssetPaginatedResult":
+        """list.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param project_id:
+        :type project_id: str
+        :param name:
+        :type name: str
+        :param tag:
+        :type tag: str
+        :param count:
+        :type count: int
+        :param skip_token:
+        :type skip_token: str
+        :param tags:
+        :type tags: str
+        :param properties:
+        :type properties: str
+        :param type:
+        :type type: str
+        :param orderby:
+        :type orderby: str or ~azure.mgmt.machinelearningservices.models.OrderString
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: AssetPaginatedResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.AssetPaginatedResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AssetPaginatedResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_list_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            project_id=project_id,
+            name=name,
+            tag=tag,
+            count=count,
+            skip_token=skip_token,
+            tags=tags,
+            properties=properties,
+            type=type,
+            orderby=orderby,
+            template_url=self.list.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('AssetPaginatedResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    list.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/assets'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def patch(
+        self,
+        id: str,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: List["_models.Operation"],
+        **kwargs: Any
+    ) -> "_models.Asset":
+        """patch.
+
+        :param id:
+        :type id: str
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: list[~azure.mgmt.machinelearningservices.models.Operation]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Asset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Asset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Asset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            _json = self._serialize.body(body, '[Operation]')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            _json = self._serialize.body(body, '[Operation]')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_patch_request(
+            id=id,
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.patch.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Asset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    patch.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/assets/{id}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete(
+        self,
+        id: str,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> None:
+        """delete.
+
+        :param id:
+        :type id: str
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_request(
+            id=id,
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.delete.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/assets/{id}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def query_by_id(
+        self,
+        id: str,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> "_models.Asset":
+        """query_by_id.
+
+        :param id:
+        :type id: str
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Asset, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Asset
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Asset"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_query_by_id_request(
+            id=id,
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.query_by_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Asset', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    query_by_id.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/assets/{id}'}  # type: ignore
+
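The assets group exposes plain awaitable request/response methods rather than pagers; a usage sketch against list and delete (all identifiers are placeholders):

result = await client.assets.list(
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-asset",
    count=10,
)
await client.assets.delete(
    id="<asset-id>",
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
)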
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_extensive_model_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_extensive_model_operations.py
new file mode 100644
index 00000000..6f821f49
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_extensive_model_operations.py
@@ -0,0 +1,103 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._extensive_model_operations import build_query_by_id_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ExtensiveModelOperations:
+    """ExtensiveModelOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def query_by_id(
+        self,
+        id: str,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> "_models.ExtensiveModel":
+        """query_by_id.
+
+        :param id:
+        :type id: str
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtensiveModel, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ExtensiveModel
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExtensiveModel"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_query_by_id_request(
+            id=id,
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.query_by_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ExtensiveModel', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    query_by_id.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/extensiveModels/{id}'}  # type: ignore
+
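The single operation in this group follows the same shape; a brief sketch with placeholder identifiers:

extensive = await client.extensive_model.query_by_id(
    id="<model-id>",
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
)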
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_migration_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_migration_operations.py
new file mode 100644
index 00000000..b6c4b7e4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_migration_operations.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._migration_operations import build_start_migration_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MigrationOperations:
+    """MigrationOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def start_migration(
+        self,
+        migration: Optional[str] = None,
+        timeout: Optional[str] = "00:01:00",
+        collection_id: Optional[str] = None,
+        workspace_id: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """start_migration.
+
+        :param migration:
+        :type migration: str
+        :param timeout:
+        :type timeout: str
+        :param collection_id:
+        :type collection_id: str
+        :param workspace_id:
+        :type workspace_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_start_migration_request(
+            migration=migration,
+            timeout=timeout,
+            collection_id=collection_id,
+            workspace_id=workspace_id,
+            template_url=self.start_migration.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    start_migration.metadata = {'url': '/modelregistry/v1.0/meta/migration'}  # type: ignore
+
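start_migration takes only query-style parameters and returns nothing on success; a sketch in which every value is a placeholder and the timeout string follows the HH:MM:SS default declared above:

await client.migration.start_migration(
    migration="<migration-name>",
    timeout="00:02:00",
    collection_id="<collection-id>",
    workspace_id="<workspace-id>",
)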
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_models_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_models_operations.py
new file mode 100644
index 00000000..f666dcec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/model_dataplane/aio/operations/_models_operations.py
@@ -0,0 +1,875 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools
+from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._models_operations import build_batch_get_resolved_uris_request, build_batch_query_request, build_create_unregistered_input_model_request, build_create_unregistered_output_model_request, build_delete_request, build_deployment_settings_request, build_list_query_post_request, build_list_request, build_patch_request, build_query_by_id_request, build_register_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ModelsOperations:
+    """ModelsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def register(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "_models.Model",
+        auto_version: Optional[bool] = True,
+        **kwargs: Any
+    ) -> "_models.Model":
+        """register.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.Model
+        :param auto_version:
+        :type auto_version: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Model, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Model
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Model"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            _json = self._serialize.body(body, 'Model')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            _json = self._serialize.body(body, 'Model')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_register_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            auto_version=auto_version,
+            template_url=self.register.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Model', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    register.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models'}  # type: ignore
+
+
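register is awaited like the rest of the surface and returns the deserialized Model; a sketch assuming body is a _models.Model built elsewhere (its fields are not shown in this diff, so none are invented here):

registered = await client.models.register(
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    body=body,          # _models.Model instance constructed by the caller
    auto_version=True,  # let the service assign the next version
)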
+    @distributed_trace_async
+    async def list(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        name: Optional[str] = None,
+        tag: Optional[str] = None,
+        version: Optional[str] = None,
+        framework: Optional[str] = None,
+        description: Optional[str] = None,
+        count: Optional[int] = None,
+        offset: Optional[int] = None,
+        skip_token: Optional[str] = None,
+        tags: Optional[str] = None,
+        properties: Optional[str] = None,
+        run_id: Optional[str] = None,
+        dataset_id: Optional[str] = None,
+        order_by: Optional[str] = None,
+        latest_version_only: Optional[bool] = False,
+        feed: Optional[str] = None,
+        list_view_type: Optional[Union[str, "_models.ListViewType"]] = None,
+        **kwargs: Any
+    ) -> "_models.ModelPagedResponse":
+        """list.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param name:
+        :type name: str
+        :param tag:
+        :type tag: str
+        :param version:
+        :type version: str
+        :param framework:
+        :type framework: str
+        :param description:
+        :type description: str
+        :param count:
+        :type count: int
+        :param offset:
+        :type offset: int
+        :param skip_token:
+        :type skip_token: str
+        :param tags:
+        :type tags: str
+        :param properties:
+        :type properties: str
+        :param run_id:
+        :type run_id: str
+        :param dataset_id:
+        :type dataset_id: str
+        :param order_by:
+        :type order_by: str
+        :param latest_version_only:
+        :type latest_version_only: bool
+        :param feed:
+        :type feed: str
+        :param list_view_type:
+        :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelPagedResponse, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ModelPagedResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ModelPagedResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_list_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            name=name,
+            tag=tag,
+            version=version,
+            framework=framework,
+            description=description,
+            count=count,
+            offset=offset,
+            skip_token=skip_token,
+            tags=tags,
+            properties=properties,
+            run_id=run_id,
+            dataset_id=dataset_id,
+            order_by=order_by,
+            latest_version_only=latest_version_only,
+            feed=feed,
+            list_view_type=list_view_type,
+            template_url=self.list.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ModelPagedResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    list.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models'}  # type: ignore
+
+
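The list overload above carries a long set of optional filters; a minimal sketch filtering by name and keeping only the latest versions (scope values are placeholders):

page = await client.models.list(
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-model",
    latest_version_only=True,
    count=20,
)
# Returns a ModelPagedResponse in one shot; any continuation is driven manually
# through the skip_token request parameter rather than an async pager.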
+    @distributed_trace_async
+    async def create_unregistered_input_model(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "_models.CreateUnregisteredInputModelDto",
+        **kwargs: Any
+    ) -> "_models.Model":
+        """create_unregistered_input_model.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateUnregisteredInputModelDto
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Model, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Model
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Model"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            _json = self._serialize.body(body, 'CreateUnregisteredInputModelDto')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            _json = self._serialize.body(body, 'CreateUnregisteredInputModelDto')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_create_unregistered_input_model_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.create_unregistered_input_model.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Model', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create_unregistered_input_model.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/createUnregisteredInput'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def create_unregistered_output_model(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "_models.CreateUnregisteredOutputModelDto",
+        **kwargs: Any
+    ) -> "_models.Model":
+        """create_unregistered_output_model.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateUnregisteredOutputModelDto
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Model, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Model
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Model"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            _json = self._serialize.body(body, 'CreateUnregisteredOutputModelDto')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            _json = self._serialize.body(body, 'CreateUnregisteredOutputModelDto')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_create_unregistered_output_model_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.create_unregistered_output_model.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Model', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create_unregistered_output_model.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/createUnregisteredOutput'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_get_resolved_uris(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.BatchGetResolvedUrisDto"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchModelPathResponseDto":
+        """batch_get_resolved_uris.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchGetResolvedUrisDto
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchModelPathResponseDto, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchModelPathResponseDto
+        :raises: ~azure.core.exceptions.HttpResponseError
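+
+        Illustrative usage (a minimal sketch, not generated code; assumes an
+        authenticated async client exposing this operation group as ``client.models``,
+        a ``BatchGetResolvedUrisDto`` imported from this package's ``models``
+        namespace, and placeholder identifiers)::
+
+            paths = await client.models.batch_get_resolved_uris(
+                subscription_id="<subscription-id>",
+                resource_group_name="<resource-group>",
+                workspace_name="<workspace-name>",
+                body=BatchGetResolvedUrisDto(),
+            )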
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchModelPathResponseDto"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'BatchGetResolvedUrisDto')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'BatchGetResolvedUrisDto')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_batch_get_resolved_uris_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.batch_get_resolved_uris.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchModelPathResponseDto', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_get_resolved_uris.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/batchGetResolvedUris'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def query_by_id(
+        self,
+        id: str,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        include_deployment_settings: Optional[bool] = False,
+        **kwargs: Any
+    ) -> "_models.Model":
+        """query_by_id.
+
+        :param id:
+        :type id: str
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param include_deployment_settings:
+        :type include_deployment_settings: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Model, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Model
+        :raises: ~azure.core.exceptions.HttpResponseError
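+
+        Illustrative usage (a minimal sketch, not generated code; assumes an
+        authenticated async client exposing this operation group as ``client.models``
+        and placeholder identifiers)::
+
+            model = await client.models.query_by_id(
+                id="<model-id>",
+                subscription_id="<subscription-id>",
+                resource_group_name="<resource-group>",
+                workspace_name="<workspace-name>",
+                include_deployment_settings=True,
+            )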
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Model"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_query_by_id_request(
+            id=id,
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            include_deployment_settings=include_deployment_settings,
+            template_url=self.query_by_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Model', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    query_by_id.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{id}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete(
+        self,
+        id: str,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> None:
+        """delete.
+
+        :param id:
+        :type id: str
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
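+
+        Illustrative usage (a minimal sketch, not generated code; assumes an
+        authenticated async client exposing this operation group as ``client.models``
+        and placeholder identifiers)::
+
+            await client.models.delete(
+                id="<model-id>",
+                subscription_id="<subscription-id>",
+                resource_group_name="<resource-group>",
+                workspace_name="<workspace-name>",
+            )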
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_delete_request(
+            id=id,
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.delete.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{id}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def patch(
+        self,
+        id: str,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: List["_models.Operation"],
+        **kwargs: Any
+    ) -> "_models.Model":
+        """patch.
+
+        :param id:
+        :type id: str
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: list[~azure.mgmt.machinelearningservices.models.Operation]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Model, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Model
+        :raises: ~azure.core.exceptions.HttpResponseError
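+
+        Illustrative usage (a minimal sketch, not generated code; assumes an
+        authenticated async client exposing this operation group as ``client.models``,
+        an ``Operation`` model imported from this package's ``models`` namespace, and
+        placeholder identifiers; populate each operation's fields as required)::
+
+            patch_operations = [Operation()]
+            model = await client.models.patch(
+                id="<model-id>",
+                subscription_id="<subscription-id>",
+                resource_group_name="<resource-group>",
+                workspace_name="<workspace-name>",
+                body=patch_operations,
+            )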
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Model"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            _json = self._serialize.body(body, '[Operation]')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            _json = self._serialize.body(body, '[Operation]')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_patch_request(
+            id=id,
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.patch.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Model', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    patch.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{id}'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def list_query_post(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.ListModelsRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.ModelListModelsRequestPagedResponse":
+        """list_query_post.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ListModelsRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelListModelsRequestPagedResponse, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ModelListModelsRequestPagedResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
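+
+        Illustrative usage (a minimal sketch, not generated code; assumes an
+        authenticated async client exposing this operation group as ``client.models``
+        and placeholder identifiers; pass a ``ListModelsRequest`` as ``body`` to
+        filter the results)::
+
+            page = await client.models.list_query_post(
+                subscription_id="<subscription-id>",
+                resource_group_name="<resource-group>",
+                workspace_name="<workspace-name>",
+            )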
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ModelListModelsRequestPagedResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'ListModelsRequest')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'ListModelsRequest')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_list_query_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.list_query_post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ModelListModelsRequestPagedResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    list_query_post.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/list'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_query(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.ModelBatchDto"] = None,
+        **kwargs: Any
+    ) -> "_models.ModelBatchResponseDto":
+        """batch_query.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ModelBatchDto
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ModelBatchResponseDto, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ModelBatchResponseDto
+        :raises: ~azure.core.exceptions.HttpResponseError
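+
+        Illustrative usage (a minimal sketch, not generated code; assumes an
+        authenticated async client exposing this operation group as ``client.models``,
+        a ``ModelBatchDto`` imported from this package's ``models`` namespace, and
+        placeholder identifiers)::
+
+            batch = await client.models.batch_query(
+                subscription_id="<subscription-id>",
+                resource_group_name="<resource-group>",
+                workspace_name="<workspace-name>",
+                body=ModelBatchDto(),
+            )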
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ModelBatchResponseDto"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'ModelBatchDto')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'ModelBatchDto')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_batch_query_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.batch_query.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ModelBatchResponseDto', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_query.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/querybatch'}  # type: ignore
+
+
+    @distributed_trace_async
+    async def deployment_settings(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.ModelSettingsIdentifiers"] = None,
+        **kwargs: Any
+    ) -> None:
+        """deployment_settings.
+
+        :param subscription_id:
+        :type subscription_id: str
+        :param resource_group_name:
+        :type resource_group_name: str
+        :param workspace_name:
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ModelSettingsIdentifiers
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
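+
+        Illustrative usage (a minimal sketch, not generated code; assumes an
+        authenticated async client exposing this operation group as ``client.models``,
+        a ``ModelSettingsIdentifiers`` imported from this package's ``models``
+        namespace, and placeholder identifiers)::
+
+            await client.models.deployment_settings(
+                subscription_id="<subscription-id>",
+                resource_group_name="<resource-group>",
+                workspace_name="<workspace-name>",
+                body=ModelSettingsIdentifiers(),
+            )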
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json-patch+json")  # type: Optional[str]
+
+        _json = None
+        _content = None
+        if content_type.split(";")[0] in ['application/json', 'text/json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'ModelSettingsIdentifiers')
+        elif content_type.split(";")[0] in ['application/json-patch+json', 'application/*+json']:
+            if body is not None:
+                _json = self._serialize.body(body, 'ModelSettingsIdentifiers')
+        else:
+            raise ValueError(
+                "The content_type '{}' is not one of the allowed values: "
+                "['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']".format(content_type)
+            )
+
+        request = build_deployment_settings_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.deployment_settings.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    deployment_settings.metadata = {'url': '/modelregistry/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/deploymentSettings'}  # type: ignore
+